From 605a7fb56255a8628f72dd823d1680f55fc4a255 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 12:06:05 -0700 Subject: [PATCH 01/76] send to bk --- README.md | 4 +- dbt_project.yml | 3 +- integration_tests/dbt_project.yml | 3 +- integration_tests/seeds/audit_log_data.csv | 6 + .../int_zendesk__schedule_history.sql | 137 ++++++++++++++++++ packages.yml | 9 +- 6 files changed, 155 insertions(+), 7 deletions(-) create mode 100644 integration_tests/seeds/audit_log_data.csv create mode 100644 models/intermediate/int_zendesk__schedule_history.sql diff --git a/README.md b/README.md index 8c0b9fdf..1dc17085 100644 --- a/README.md +++ b/README.md @@ -64,7 +64,7 @@ Include the following zendesk package version in your `packages.yml` file: ```yml packages: - package: fivetran/zendesk - version: [">=0.16.0", "<0.17.0"] + version: [">=0.17.0", "<0.18.0"] ``` > **Note**: Do not include the Zendesk Support source package. The Zendesk Support transform package already has a dependency on the source in its own `packages.yml` file. @@ -211,7 +211,7 @@ This dbt package is dependent on the following dbt packages. Please be aware tha ```yml packages: - package: fivetran/zendesk_source - version: [">=0.11.0", "<0.12.0"] + version: [">=0.12.0", "<0.13.0"] - package: fivetran/fivetran_utils version: [">=0.4.0", "<0.5.0"] diff --git a/dbt_project.yml b/dbt_project.yml index a18ac5f2..9dc5ec4f 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,5 +1,5 @@ name: 'zendesk' -version: '0.16.0' +version: '0.17.0' config-version: 2 @@ -47,6 +47,7 @@ vars: user: "{{ ref('stg_zendesk__user') }}" daylight_time: "{{ ref('stg_zendesk__daylight_time') }}" time_zone: "{{ ref('stg_zendesk__time_zone') }}" + audit_log: "{{ ref('stg_zendesk__audit_log') }}" using_schedules: true using_domain_names: true using_user_tags: true diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 53532764..28d0473b 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -1,7 +1,7 @@ config-version: 2 name: 'zendesk_integration_tests' -version: '0.16.0' +version: '0.17.0' profile: 'integration_tests' @@ -25,6 +25,7 @@ vars: zendesk_organization_tag_identifier: "organization_tag_data" zendesk_user_identifier: "user_data" zendesk_user_tag_identifier: "user_tag_data" + zendesk_audit_log_identifier: "audit_log_data" ## For validation testing. To be commented out before release. 
# zendesk_schema: zendesk_test_env diff --git a/integration_tests/seeds/audit_log_data.csv b/integration_tests/seeds/audit_log_data.csv new file mode 100644 index 00000000..a0bef688 --- /dev/null +++ b/integration_tests/seeds/audit_log_data.csv @@ -0,0 +1,6 @@ +id,_fivetran_synced,action,actor_id,change_description,created_at,source_id,source_label,source_type +579796,2024-05-28 21:53:06.793000,update,37253,"Workweek changed from {:sun=&gt;{""01:45""=&gt;""02:45""}, :mon=&gt;{""09:00""=&gt;""20:00""}, :tue=&gt;{""09:00""=&gt;""20:00""}, :wed=&gt;{""08:00""=&gt;""20:00""}, :thu=&gt;{""08:00""=&gt;""20:00""}, :fri=&gt;{""08:00""=&gt;""20:00""}} to {:sun=&gt;{""03:00""=&gt;""04:00""}, :mon=&gt;{""08:00""=&gt;""20:00""}, :tue=&gt;{""08:00""=&gt;""20:00""}, :wed=&gt;{""07:15""=&gt;""20:00""}, :thu=&gt;{""07:15""=&gt;""20:00""}, :fri=&gt;{""07:15""=&gt;""20:00""}}",2024-05-28 21:51:37.000000,18542,Workweek: Central US Schedule,zendesk/business_hours/workweek +2679952,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}, :mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:29.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek +293556,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:28.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek +4441364,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:10.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek +70900,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql new file mode 100644 index 00000000..eb9a4e52 --- /dev/null +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -0,0 +1,137 @@ +with reformat as ( + + select + _fivetran_synced, + created_at, + source_id as schedule_id, + change_description as original, + replace(replace(replace(replace(replace(replace(replace(replace(lower(change_description), '=&gt;', ': '), + ':mon', '"mon"'), ':tue', '"tue"'), ':wed', '"wed"'), ':thu', '"thu"'), ':fri', '"fri"'), ':sat', '"sat"'), ':sun', '"sun"') as change_description + + from {{ ref('stg_zendesk__audit_log') }} + where lower(change_description) like '%workweek%' + order by created_at desc +), + +jsonify as ( + + 
select + _fivetran_synced, + created_at, + schedule_id, + original, + {{ dbt.split_part('change_description', "'workweek changed from '", 2) }} as change_description + from reformat +), + +split_up as ( + + select + _fivetran_synced, + created_at, + schedule_id, + original, + {{ dbt.split_part('change_description', "' to '", 1) }} as from_schedule, + {{ dbt.split_part('change_description', "' to '", 2) }} as to_schedule + from jsonify +), + +split_days as ( + + select + _fivetran_synced, + created_at, + schedule_id, + original + {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} + , REGEXP_EXTRACT(json_extract(from_schedule, '$.{{ day }}'), r'{"([^"]+)"') as from_{{ day }}_start + , REGEXP_EXTRACT(json_extract(from_schedule, '$.{{ day }}'), r'":"([^"]+)"}') as from_{{ day }}_end + , REGEXP_EXTRACT(json_extract(to_schedule, '$.{{ day }}'), r'{"([^"]+)"') as to_{{ day }}_start + , REGEXP_EXTRACT(json_extract(to_schedule, '$.{{ day }}'), r'":"([^"]+)"}') as to_{{ day }}_end + {% endfor %} + + from split_up +), + +verticalize as ( + + {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} + select + _fivetran_synced, + lag(created_at) over (partition by schedule_id order by created_at) as created_at, + schedule_id, + original, + '{{ day }}' as dow, + from_{{ day }}_start as start_time, + from_{{ day }}_end as end_time + + from split_days + where from_{{ day }}_start is not null and from_{{ day }}_end is not null + + union distinct + + select + _fivetran_synced, + created_at, + schedule_id, + original, + '{{ day }}' as dow, + to_{{ day }}_start as start_time, + to_{{ day }}_end as end_time + + from split_days + where to_{{ day }}_start is not null and to_{{ day }}_end is not null + + {% if not loop.last %}union distinct{% endif %} + + {% endfor %} +), + +split_times as ( + + select + schedule_id, + cast({{ dbt.split_part('start_time', "':'", 1) }} as {{ dbt.type_int() }}) as start_time_hh, + cast({{ dbt.split_part('start_time', "':'", 2) }} as {{ dbt.type_int() }}) as start_time_mm, + cast({{ dbt.split_part('end_time', "':'", 1) }} as {{ dbt.type_int() }}) as end_time_hh, + cast({{ dbt.split_part('end_time', "':'", 2) }} as {{ dbt.type_int() }}) as end_time_mm, + start_time, + end_time, + dow, + _fivetran_synced, + created_at as valid_from, + coalesce(lead(created_at) over (partition by schedule_id, dow order by created_at), {{ dbt.current_timestamp_backcompat() }}) as valid_to + + from verticalize +), + +final as ( + + select + schedule_id, + start_time_hh * 60 + start_time_mm + 24 * 60 * case + when dow = 'mon' then 1 + when dow = 'tue' then 2 + when dow = 'wed' then 3 + when dow = 'thu' then 4 + when dow = 'fri' then 5 + when dow = 'sat' then 6 + else 0 end as start_time, + end_time_hh * 60 + end_time_mm + 24 * 60 * case + when dow = 'mon' then 1 + when dow = 'tue' then 2 + when dow = 'wed' then 3 + when dow = 'thu' then 4 + when dow = 'fri' then 5 + when dow = 'sat' then 6 + else 0 end as end_time, + coalesce(valid_from, '1970-01-01') as valid_from, + valid_to, + _fivetran_synced, + dow + + from split_times +) + +select * +from final \ No newline at end of file diff --git a/packages.yml b/packages.yml index 54ef10f0..c6364b14 100644 --- a/packages.yml +++ b/packages.yml @@ -1,6 +1,9 @@ packages: - - package: fivetran/zendesk_source - version: [">=0.11.0", "<0.12.0"] - + # - package: fivetran/zendesk_source + # version: [">=0.11.0", "<0.12.0"] + - git: https://github.com/fivetran/dbt_zendesk_source.git + revision: explore/audit-log-spike + warn-unpinned: false 
+ # - local: ../dbt_zendesk_source - package: calogica/dbt_date version: [">=0.9.0", "<1.0.0"] From c58b4b6d1ff08d79213a0ab2b2ceb8163e900577 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 14:30:29 -0700 Subject: [PATCH 02/76] try this out --- macros/json_parse_nonscalar.sql | 41 +++++++++++++++++++ macros/regex_extract.sql | 17 ++++++++ .../int_zendesk__schedule_history.sql | 8 ++-- .../int_zendesk__schedule_spine.sql | 1 + 4 files changed, 63 insertions(+), 4 deletions(-) create mode 100644 macros/json_parse_nonscalar.sql create mode 100644 macros/regex_extract.sql diff --git a/macros/json_parse_nonscalar.sql b/macros/json_parse_nonscalar.sql new file mode 100644 index 00000000..00ffc007 --- /dev/null +++ b/macros/json_parse_nonscalar.sql @@ -0,0 +1,41 @@ +{% macro json_parse_nonscalar(string, string_path) -%} + +{{ adapter.dispatch('json_parse_nonscalar', 'zendesk') (string, string_path) }} + +{%- endmacro %} + +{% macro default__json_parse_nonscalar(string, string_path) %} + + json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} ) + +{% endmacro %} + +{% macro redshift__json_parse_nonscalar(string, string_path) %} + + json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} ) + +{% endmacro %} + +{% macro bigquery__json_parse_nonscalar(string, string_path) %} + + json_extract({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ') + +{% endmacro %} + +{% macro postgres__json_parse_nonscalar(string, string_path) %} + + {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }' + +{% endmacro %} + +{% macro snowflake__json_parse_nonscalar(string, string_path) %} + + parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%} + +{% endmacro %} + +{% macro spark__json_parse_nonscalar(string, string_path) %} + + {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql new file mode 100644 index 00000000..ba307e67 --- /dev/null +++ b/macros/regex_extract.sql @@ -0,0 +1,17 @@ +{% macro regex_extract(string, start_or_end) -%} + +{{ adapter.dispatch('regex_extract', 'zendesk') (string, start_or_end) }} + +{%- endmacro %} + +{% macro default__regex_extract(string, start_or_end) %} + +REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% else %} r'":"([^"]+)"}' {% endif -%} ) + +{% endmacro %} + +{% macro bigquery__regex_extract(string, start_or_end) %} + +REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% else %} r'":"([^"]+)"}' {% endif -%} ) + +{% endmacro %} diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index eb9a4e52..b3436e60 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -44,10 +44,10 @@ split_days as ( schedule_id, original {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} - , REGEXP_EXTRACT(json_extract(from_schedule, '$.{{ day }}'), r'{"([^"]+)"') as from_{{ day }}_start - , 
REGEXP_EXTRACT(json_extract(from_schedule, '$.{{ day }}'), r'":"([^"]+)"}') as from_{{ day }}_end - , REGEXP_EXTRACT(json_extract(to_schedule, '$.{{ day }}'), r'{"([^"]+)"') as to_{{ day }}_start - , REGEXP_EXTRACT(json_extract(to_schedule, '$.{{ day }}'), r'":"([^"]+)"}') as to_{{ day }}_end + , REGEXP_EXTRACT({{ json_parse_nonscalar('from_schedule', [day]) }}, r'{"([^"]+)"') as from_{{ day }}_start + , REGEXP_EXTRACT({{ json_parse_nonscalar('from_schedule', [day]) }}, r'":"([^"]+)"}') as from_{{ day }}_end + , REGEXP_EXTRACT({{ json_parse_nonscalar('to_schedule', [day]) }}, r'{"([^"]+)"') as to_{{ day }}_start + , REGEXP_EXTRACT({{ json_parse_nonscalar('to_schedule', [day]) }}, r'":"([^"]+)"}') as to_{{ day }}_end {% endfor %} from split_up diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 0110f28e..de197c74 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -15,6 +15,7 @@ with timezone as ( select * from {{ var('daylight_time') }} +-- TODO: BRING SCHEDULE HISTORY INTO THIS MODEL LIKELY ), schedule as ( select * From 5f2113be9fd75c924a6afaefa4fc5e73abc35112 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 15:09:28 -0700 Subject: [PATCH 03/76] first try --- macros/regex_extract.sql | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index ba307e67..5a1e1029 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -15,3 +15,27 @@ REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% else %} r'":"([^"]+)"}' {% endif -%} ) {% endmacro %} + +{% macro snowflake__regex_extract(string, start_or_end) %} + +REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e', 1 ) + +{% endmacro %} + +{% macro postgres__regex_extract(string, start_or_end) %} + +(regexp_matches({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '": "([^"]+)' {% endif -%} ))[1] + +{% endmacro %} + +{% macro redshift__regex_extract(string, start_or_end) %} + +REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e') + +{% endmacro %} + +{% macro spark__regex_extract(string, start_or_end) %} + +regexp_extract({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '": "([^"]+)"' {% endif -%}, 1) + +{% endmacro %} \ No newline at end of file From 662f3f2e3ce1260b36230932da6c7a3d2c1262f8 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 15:13:51 -0700 Subject: [PATCH 04/76] use macro --- models/intermediate/int_zendesk__schedule_history.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index b3436e60..6a6a4179 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -44,10 +44,10 @@ split_days as ( schedule_id, original {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} - , REGEXP_EXTRACT({{ json_parse_nonscalar('from_schedule', [day]) }}, r'{"([^"]+)"') as
from_{{ day }}_start - , REGEXP_EXTRACT({{ json_parse_nonscalar('from_schedule', [day]) }}, r'":"([^"]+)"}') as from_{{ day }}_end - , REGEXP_EXTRACT({{ json_parse_nonscalar('to_schedule', [day]) }}, r'{"([^"]+)"') as to_{{ day }}_start - , REGEXP_EXTRACT({{ json_parse_nonscalar('to_schedule', [day]) }}, r'":"([^"]+)"}') as to_{{ day }}_end + , {{ regex_extract(json_parse_nonscalar('from_schedule', [day]), 'start') }} as from_{{ day }}_start + , {{ regex_extract(json_parse_nonscalar('from_schedule', [day]), 'end') }} as from_{{ day }}_end + , {{ regex_extract(json_parse_nonscalar('to_schedule', [day]), 'start') }} as to_{{ day }}_start + , {{ regex_extract(json_parse_nonscalar('to_schedule', [day]), 'end') }} as to_{{ day }}_end {% endfor %} from split_up From 5bfead91a1752f57c80fa56da1ddd941d81cdd15 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 15:47:18 -0700 Subject: [PATCH 05/76] Testing --- .buildkite/scripts/run_models.sh | 2 +- integration_tests/dbt_project.yml | 4 +--- macros/regex_extract.sql | 4 ++-- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index 719ead40..62200df4 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -22,4 +22,4 @@ dbt test --target "$db" dbt run --vars '{using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh dbt test --target "$db" -dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file +# dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 28d0473b..1069db27 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.17.0' profile: 'integration_tests' vars: - zendesk_schema: zendesk_integration_tests_50 + zendesk_schema: zz_dbt_jamie zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" @@ -43,8 +43,6 @@ models: seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" zendesk_integration_tests: - +column_types: - _fivetran_synced: timestamp +column_types: _fivetran_synced: timestamp group_data: diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index 5a1e1029..08b851d8 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -18,7 +18,7 @@ REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% {% macro snowflake__regex_extract(string, start_or_end) %} -REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e', 1 ) +REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '":"([^"]+)"' {% endif -%}, 1, 1, 'e', 1 ) {% endmacro %} @@ -30,7 +30,7 @@ REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% els {% macro redshift__regex_extract(string, start_or_end) %} -REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e') +REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e') {% endmacro %} From d36dc490d6eb13bc9b4d0d6cfa1f60f6855115ed Mon Sep 
17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 15:48:01 -0700 Subject: [PATCH 06/76] push --- integration_tests/dbt_project.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 1069db27..28d0473b 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.17.0' profile: 'integration_tests' vars: - zendesk_schema: zz_dbt_jamie + zendesk_schema: zendesk_integration_tests_50 zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" @@ -43,6 +43,8 @@ models: seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" zendesk_integration_tests: + +column_types: + _fivetran_synced: timestamp +column_types: _fivetran_synced: timestamp group_data: From 700589f51c4bd78306dde663c88fdaa35bc2d166 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Mon, 3 Jun 2024 16:02:13 -0700 Subject: [PATCH 07/76] redshift --- macros/regex_extract.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index 08b851d8..d14c052a 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -30,12 +30,12 @@ REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% els {% macro redshift__regex_extract(string, start_or_end) %} -REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '": "([^"]+)"' {% endif -%}, 1, 1, 'e') +REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '":"([^"]+)"' {% endif -%}, 1, 1, 'e') {% endmacro %} {% macro spark__regex_extract(string, start_or_end) %} -regexp_extract({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '": "([^"]+)"' {% endif -%}, 1) +regexp_extract({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '":"([^"]+)"' {% endif -%}, 1) {% endmacro %} \ No newline at end of file From f6ada0cfb342d1c3b2a2995739048ff2956a2a42 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Tue, 4 Jun 2024 09:48:50 -0700 Subject: [PATCH 08/76] try nullif --- models/intermediate/int_zendesk__schedule_history.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index 6a6a4179..4104cb8c 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -91,10 +91,10 @@ split_times as ( select schedule_id, - cast({{ dbt.split_part('start_time', "':'", 1) }} as {{ dbt.type_int() }}) as start_time_hh, - cast({{ dbt.split_part('start_time', "':'", 2) }} as {{ dbt.type_int() }}) as start_time_mm, - cast({{ dbt.split_part('end_time', "':'", 1) }} as {{ dbt.type_int() }}) as end_time_hh, - cast({{ dbt.split_part('end_time', "':'", 2) }} as {{ dbt.type_int() }}) as end_time_mm, + cast(nullif({{ dbt.split_part('start_time', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, + cast(nullif({{ dbt.split_part('start_time', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, + cast(nullif({{ dbt.split_part('end_time', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, + cast(nullif({{ dbt.split_part('end_time', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, start_time, end_time, dow,
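Up to this point the schedule math is easiest to verify by hand: each day/time pair collapses into minutes elapsed since Sunday 00:00, so the whole week lives on one integer axis. A minimal sketch of that arithmetic, assuming the same Sunday = 0 through Saturday = 6 numbering used in the model's case expression; the literal times are made-up inputs, not package output:

```sql
-- Tuesday (day number 2), 09:00-17:00, encoded as week-minutes:
--   start = 9*60 + 0 + 2*24*60 = 3420
--   end   = 17*60 + 0 + 2*24*60 = 3900
select
    9 * 60 + 0 + 2 * 24 * 60 as start_time,   -- 3420
    17 * 60 + 0 + 2 * 24 * 60 as end_time     -- 3900
```

With this encoding, testing whether a timestamp lands inside a schedule block is a single between comparison, with no day-of-week branching.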
From f07271c86383e97816c8e4f30275acd656732424 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 11 Sep 2024 10:00:25 -0500 Subject: [PATCH 09/76] feature/historical-schedules --- .../int_zendesk__timezones_w_dt.sql | 94 +++++++++++++++++++ 1 file changed, 94 insertions(+) create mode 100644 models/intermediate/int_zendesk__timezones_w_dt.sql diff --git a/models/intermediate/int_zendesk__timezones_w_dt.sql b/models/intermediate/int_zendesk__timezones_w_dt.sql new file mode 100644 index 00000000..a8cc91c6 --- /dev/null +++ b/models/intermediate/int_zendesk__timezones_w_dt.sql @@ -0,0 +1,94 @@ +{{ config(enabled=var('using_schedules', True)) }} + +/* + The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. + End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) +*/ + +with timezone as ( + + select * + from {{ var('time_zone') }} + +), daylight_time as ( + + select * + from {{ var('daylight_time') }} + +), timezone_with_dt as ( + + select + timezone.*, + daylight_time.daylight_start_utc, + daylight_time.daylight_end_utc, + daylight_time.daylight_offset_minutes + + from timezone + left join daylight_time + on timezone.time_zone = daylight_time.time_zone + +), order_timezone_dt as ( + + select + *, + -- will be null for timezones without any daylight savings records (and the first entry) + -- we will coalesce the first entry date with 1970-01-01 + lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc, + -- will be null for timezones without any daylight savings records (and the last entry) + -- we will coalesce the last entry date with the current date + lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc + + from timezone_with_dt + +), split_timezones as ( + + -- standard (includes timezones without DT) + -- starts: when the last Daylight Savings ended + -- ends: when the next Daylight Savings starts + select + time_zone, + standard_offset_minutes as offset_minutes, + + -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT + coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from, + + -- daylight_start_utc is null for timezones that don't use DT + coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until + + from order_timezone_dt + + union all + + -- DT (excludes timezones without it) + -- starts: when this Daylight Savings started + -- ends: when this Daylight Savings ends + select + time_zone, + -- Pacific Time is -8h during standard time and -7h during DT + standard_offset_minutes + daylight_offset_minutes as offset_minutes, + daylight_start_utc as valid_from, + daylight_end_utc as valid_until + + from order_timezone_dt + where daylight_offset_minutes is not null + + union all + + select + time_zone, + standard_offset_minutes as offset_minutes, + + -- Get the latest daylight_end_utc time and set that as the valid_from + max(daylight_end_utc) as valid_from, + + -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future. + cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until + + from order_timezone_dt + group by 1, 2 + -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979. + having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date) +) + +select * +from split_timezones \ No newline at end of file
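The spine this model builds is easiest to picture as rows: a DST-observing zone alternates standard and daylight offsets, and the `valid_from`/`valid_until` windows tile the calendar with no gaps. A hedged sketch of that row shape, reusing the -8h standard / -7h daylight Pacific offsets noted in the comments above; the zone name and boundary dates are illustrative assumptions, not model output:

```sql
-- one standard-time window followed by the daylight-time window that replaces it
select 'America/Los_Angeles' as time_zone, -480 as offset_minutes, cast('2023-11-05' as date) as valid_from, cast('2024-03-10' as date) as valid_until
union all
select 'America/Los_Angeles' as time_zone, -420 as offset_minutes, cast('2024-03-10' as date) as valid_from, cast('2024-11-03' as date) as valid_until
```

Choosing the schedule offset for a ticket then reduces to finding the one row whose window contains the ticket's timestamp.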
From cf894292e4754b54576b4769ce478cf00062f7a3 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 12 Sep 2024 00:36:47 -0500 Subject: [PATCH 10/76] rework schedule_history --- dbt_project.yml | 16 +- macros/clean_string.sql | 14 ++ macros/regex_extract.sql | 28 +-- macros/to_json_array.sql | 35 ++++ .../int_zendesk__schedule_history.sql | 170 ++++++++---------- 5 files changed, 143 insertions(+), 120 deletions(-) create mode 100644 macros/clean_string.sql create mode 100644 macros/to_json_array.sql diff --git a/dbt_project.yml b/dbt_project.yml index f1327bbb..ebfa3deb 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -33,24 +33,24 @@ vars: zendesk: ticket_field_history_columns: ['assignee_id', 'status', 'priority'] ticket_field_history_updater_columns: [] - group: "{{ ref('stg_zendesk__group') }}" + audit_log: "{{ ref('stg_zendesk__audit_log') }}" brand: "{{ ref('stg_zendesk__brand') }}" + daylight_time: "{{ ref('stg_zendesk__daylight_time') }}" domain_name: "{{ ref('stg_zendesk__domain_name') }}" + field_history: "{{ ref('stg_zendesk__ticket_field_history') }}" + group: "{{ ref('stg_zendesk__group') }}" organization_tag: "{{ ref('stg_zendesk__organization_tag') }}" organization: "{{ ref('stg_zendesk__organization') }}" - schedule: "{{ ref('stg_zendesk__schedule') }}" schedule_holiday: "{{ ref('stg_zendesk__schedule_holiday') }}" + schedule: "{{ ref('stg_zendesk__schedule') }}" ticket_comment: "{{ ref('stg_zendesk__ticket_comment') }}" - ticket: "{{ ref('stg_zendesk__ticket') }}" - ticket_form_history: "{{ ref('stg_zendesk__ticket_form_history') }}" - field_history: "{{ ref('stg_zendesk__ticket_field_history') }}" + ticket_form_history: "{{ ref('stg_zendesk__ticket_form_history') }}" ticket_schedule: "{{ ref('stg_zendesk__ticket_schedule') }}" ticket_tag: "{{ ref('stg_zendesk__ticket_tag') }}" + ticket: "{{ ref('stg_zendesk__ticket') }}" + time_zone: "{{ ref('stg_zendesk__time_zone') }}" user_tag: "{{ ref('stg_zendesk__user_tag') }}" user: "{{ ref('stg_zendesk__user') }}" - daylight_time: "{{ ref('stg_zendesk__daylight_time') }}" - time_zone: "{{ ref('stg_zendesk__time_zone') }}" - audit_log: "{{ ref('stg_zendesk__audit_log') }}" using_schedules: true using_domain_names: true using_user_tags: true diff --git a/macros/clean_string.sql b/macros/clean_string.sql new file mode 100644 index 00000000..9b3b45f8 --- /dev/null +++ b/macros/clean_string.sql @@ -0,0 +1,14 @@ +{% macro clean_string(string_field, character_list) -%} + {{ return(adapter.dispatch('clean_string', 'zendesk')(string_field, character_list)) }} +{%- endmacro %} + +{% macro default__clean_string(string_field, character_list) %} + {% for character in character_list -%} + replace( + {%- endfor -%} + {{ string_field }} + {% for character in character_list -%} + , {{ "'" ~ character ~ "'"}}, '') + {%- endfor -%} + +{% endmacro %} \ No newline at end of file diff --git a/macros/regex_extract.sql 
b/macros/regex_extract.sql index d14c052a..8342370f 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -1,41 +1,41 @@ -{% macro regex_extract(string, start_or_end) -%} +{% macro regex_extract(string, regex) -%} -{{ adapter.dispatch('regex_extract', 'zendesk') (string, start_or_end) }} +{{ adapter.dispatch('regex_extract', 'zendesk') (string, regex) }} {%- endmacro %} -{% macro default__regex_extract(string, start_or_end) %} +{% macro default__regex_extract(string, regex) %} -REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% else %} r'":"([^"]+)"}' {% endif -%} ) + regexp_extract({{ string }}, {{ regex }} ) {% endmacro %} -{% macro bigquery__regex_extract(string, start_or_end) %} +{% macro bigquery__regex_extract(string, regex) %} -REGEXP_EXTRACT({{ string }}, {%- if start_or_end == 'start' %} r'{"([^"]+)"' {% else %} r'":"([^"]+)"}' {% endif -%} ) + regexp_extract({{ string }}, {{ regex }} ) {% endmacro %} -{% macro snowflake__regex_extract(string, start_or_end) %} +{% macro snowflake__regex_extract(string, regex) %} -REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '":"([^"]+)"' {% endif -%}, 1, 1, 'e', 1 ) + REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 ) {% endmacro %} -{% macro postgres__regex_extract(string, start_or_end) %} +{% macro postgres__regex_extract(string, regex) %} -(regexp_matches({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '": "([^"]+)' {% endif -%} ))[1] + (regexp_matches({{ string }}, {{ regex }}))[1] {% endmacro %} -{% macro redshift__regex_extract(string, start_or_end) %} +{% macro redshift__regex_extract(string, regex) %} -REGEXP_SUBSTR({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)"' {% else %} '":"([^"]+)"' {% endif -%}, 1, 1, 'e') + REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e') {% endmacro %} -{% macro spark__regex_extract(string, start_or_end) %} +{% macro spark__regex_extract(string, regex) %} -regexp_extract({{ string }}, {%- if start_or_end == 'start' %} '"([^"]+)":' {% else %} '":"([^"]+)"' {% endif -%}, 1) + regexp_extract({{ string }}, {{ regex }}, 1) {% endmacro %} \ No newline at end of file diff --git a/macros/to_json_array.sql b/macros/to_json_array.sql new file mode 100644 index 00000000..8115ed2e --- /dev/null +++ b/macros/to_json_array.sql @@ -0,0 +1,35 @@ +{% macro to_json_array(string) -%} + +{{ adapter.dispatch('to_json_array', 'zendesk') (string) }} + +{%- endmacro %} + +{% macro default__to_json_array(string) %} + + json_extract_array({{ string }}, '$') + +{% endmacro %} + +{% macro redshift__to_json_array(string) %} + + json_parse({{ string }}) + +{% endmacro %} + +{% macro postgres__to_json_array(string) %} + + {{ string }}::jsonb + +{% endmacro %} + +{% macro snowflake__to_json_array(string) %} + + ARRAY_CONSTRUCT_PARSED({{ string }}) + +{% endmacro %} + +{% macro spark__to_json_array(string) %} + + JSON_ARRAY({{ string }}) + +{% endmacro %} \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index 4104cb8c..cf6bc158 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -1,134 +1,108 @@ -with reformat as ( +with audit_logs as ( + select * + from {{ var('audit_log') }} +), audit_logs_cleaned as ( select _fivetran_synced, created_at, source_id as schedule_id, change_description as original, - 
replace(replace(replace(replace(replace(replace(replace(replace(lower(change_description), '=&gt;', ': '), - ':mon', '"mon"'), ':tue', '"tue"'), ':wed', '"wed"'), ':thu', '"thu"'), ':fri', '"fri"'), ':sat', '"sat"'), ':sun', '"sun"') as change_description + replace( + {{ clean_string('lower(change_description)', + ['workweek changed from', 'amp', 'gt', 'quot', ';', '&', '=', ' ']) }}, + '""', '":"') + as change_description - from {{ ref('stg_zendesk__audit_log') }} - where lower(change_description) like '%workweek%' - order by created_at desc -), - -jsonify as ( - - select - _fivetran_synced, - created_at, - schedule_id, - original, - {{ dbt.split_part('change_description', "'workweek changed from '", 2) }} as change_description - from reformat -), - -split_up as ( + from audit_logs + where lower(change_description) like '%workweek changed from%' +), split_to_from as ( select _fivetran_synced, - created_at, schedule_id, - original, - {{ dbt.split_part('change_description', "' to '", 1) }} as from_schedule, - {{ dbt.split_part('change_description', "' to '", 2) }} as to_schedule - from jsonify -), - -split_days as ( - - select - _fivetran_synced, created_at, - schedule_id, - original - {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} - , {{ regex_extract(json_parse_nonscalar('from_schedule', [day]), 'start') }} as from_{{ day }}_start - , {{ regex_extract(json_parse_nonscalar('from_schedule', [day]), 'end') }} as from_{{ day }}_end - , {{ regex_extract(json_parse_nonscalar('to_schedule', [day]), 'start') }} as to_{{ day }}_start - , {{ regex_extract(json_parse_nonscalar('to_schedule', [day]), 'end') }} as to_{{ day }}_end - {% endfor %} - - from split_up -), - -verticalize as ( - - {%- for day in ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun'] -%} - select - _fivetran_synced, - lag(created_at) over (partition by schedule_id order by created_at) as created_at, - schedule_id, - original, - '{{ day }}' as dow, - from_{{ day }}_start as start_time, - from_{{ day }}_end as end_time + created_at as valid_from, + min(created_at) over (partition by schedule_id) as min_valid_from, + coalesce( + lead(created_at) over ( + partition by schedule_id order by created_at), + {{ dbt.current_timestamp_backcompat() }}) + as valid_to, + {{ dbt.split_part('change_description', "'to'", 1) }} as from_schedule, + {{ dbt.split_part('change_description', "'to'", 2) }} as to_schedule + from audit_logs_cleaned + +), split_days as ( +{% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} + {% for day, day_number in days_of_week.items() %} + select + split_to_from.*, + '{{ day }}' as day_of_week, + cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, + replace({{ zendesk.regex_extract('from_schedule', "'.*?" ~ day ~ ".*?({.*?})'") }}, ',', '},{') as from_schedule_cleaned, + replace({{ zendesk.regex_extract('to_schedule', "'.*?" 
~ day ~ ".*?({.*?})'") }}, ',', '},{') as to_schedule_cleaned + from split_to_from + + {% if not loop.last %}union all{% endif %} + {% endfor %} +), schedule_arrays as ( + select + split_days.*, + {{ zendesk.to_json_array(dbt.concat(['"["', 'from_schedule_cleaned', '"]"'])) }} as from_schedule_array, + {{ zendesk.to_json_array(dbt.concat(['"["', 'to_schedule_cleaned', '"]"'])) }} as to_schedule_array from split_days - where from_{{ day }}_start is not null and from_{{ day }}_end is not null - - union distinct +), unnest_schedules as ( + -- only want the first "from_schedule" to start off select - _fivetran_synced, - created_at, - schedule_id, - original, - '{{ day }}' as dow, - to_{{ day }}_start as start_time, - to_{{ day }}_end as end_time - - from split_days - where to_{{ day }}_start is not null and to_{{ day }}_end is not null + schedule_arrays.*, + {{ clean_string('unnested_from_schedule', ['{', '}', '"']) }} as unnested_schedule, + 'from' as schedule_source + from schedule_arrays + cross join unnest(from_schedule_array) as unnested_from_schedule + where valid_from = min_valid_from - {% if not loop.last %}union distinct{% endif %} + union all - {% endfor %} -), + select + schedule_arrays.*, + {{ clean_string('unnested_to_schedule', ['{', '}', '"']) }} as unnested_schedule, + 'to' as schedule_source + from schedule_arrays + cross join unnest(to_schedule_array) as unnested_to_schedule + where valid_from != min_valid_from -split_times as ( +), split_times as ( select schedule_id, - cast(nullif({{ dbt.split_part('start_time', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, - cast(nullif({{ dbt.split_part('start_time', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, - cast(nullif({{ dbt.split_part('end_time', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, - cast(nullif({{ dbt.split_part('end_time', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, - start_time, - end_time, - dow, + cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, + cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, + cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, + cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, + day_of_week, + day_of_week_number, _fivetran_synced, - created_at as valid_from, - coalesce(lead(created_at) over (partition by schedule_id, dow order by created_at), {{ dbt.current_timestamp_backcompat() }}) as valid_to + valid_from, + valid_to, + schedule_source - from verticalize + from unnest_schedules ), final as ( select schedule_id, - start_time_hh * 60 + start_time_mm + 24 * 60 * case - when dow = 'mon' then 1 - when dow = 'tue' then 2 - when dow = 'wed' then 3 - when dow = 'thu' then 4 - when dow = 'fri' then 5 - when dow = 'sat' then 6 - else 0 end as start_time, - end_time_hh * 60 + end_time_mm + 24 * 60 * case - when dow = 'mon' then 1 - when dow = 'tue' then 2 - when dow = 'wed' then 3 - when dow = 'thu' then 4 - when dow = 'fri' then 5 - when dow = 'sat' then 6 - else 0 end as end_time, + start_time_hh * 60 + start_time_mm + day_of_week_number * 24 * 60 as start_time, + end_time_hh * 60 + end_time_mm + day_of_week_number * 24 * 60 as end_time, coalesce(valid_from, '1970-01-01') as valid_from, valid_to, _fivetran_synced, - dow + day_of_week, + schedule_source from split_times ) From 
54cdf2103490b07a335c94f20154803890e12a22 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 13 Sep 2024 12:09:10 -0500 Subject: [PATCH 11/76] update unnest logic --- macros/clean_string.sql | 14 --- macros/to_json_array.sql | 35 ------ macros/unnest_json_array.sql | 35 ++++++ .../int_zendesk__schedule_history.sql | 115 +++++++++--------- 4 files changed, 91 insertions(+), 108 deletions(-) delete mode 100644 macros/clean_string.sql delete mode 100644 macros/to_json_array.sql create mode 100644 macros/unnest_json_array.sql diff --git a/macros/clean_string.sql b/macros/clean_string.sql deleted file mode 100644 index 9b3b45f8..00000000 --- a/macros/clean_string.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro clean_string(string_field, character_list) -%} - {{ return(adapter.dispatch('clean_string', 'zendesk')(string_field, character_list)) }} -{%- endmacro %} - -{% macro default__clean_string(string_field, character_list) %} - {% for character in character_list -%} - replace( - {%- endfor -%} - {{ string_field }} - {% for character in character_list -%} - , {{ "'" ~ character ~ "'"}}, '') - {%- endfor -%} - -{% endmacro %} \ No newline at end of file diff --git a/macros/to_json_array.sql b/macros/to_json_array.sql deleted file mode 100644 index 8115ed2e..00000000 --- a/macros/to_json_array.sql +++ /dev/null @@ -1,35 +0,0 @@ -{% macro to_json_array(string) -%} - -{{ adapter.dispatch('to_json_array', 'zendesk') (string) }} - -{%- endmacro %} - -{% macro default__to_json_array(string) %} - - json_extract_array({{ string }}, '$') - -{% endmacro %} - -{% macro redshift__to_json_array(string) %} - - json_parse({{ string }}) - -{% endmacro %} - -{% macro postgres__to_json_array(string) %} - - {{ string }}::jsonb - -{% endmacro %} - -{% macro snowflake__to_json_array(string) %} - - ARRAY_CONSTRUCT_PARSED({{ string }}) - -{% endmacro %} - -{% macro spark__to_json_array(string) %} - - JSON_ARRAY({{ string }}) - -{% endmacro %} \ No newline at end of file diff --git a/macros/unnest_json_array.sql b/macros/unnest_json_array.sql new file mode 100644 index 00000000..ea4367f4 --- /dev/null +++ b/macros/unnest_json_array.sql @@ -0,0 +1,35 @@ +{% macro unnest_json_array(string) -%} + +{{ adapter.dispatch('unnest_json_array', 'zendesk') (string) }} + +{%- endmacro %} + +{% macro bigquery__unnest_json_array(string) %} + + unnest(json_extract_array({{ string }}, '$')) + +{% endmacro %} + +{% macro snowflake__unnest_json_array(string) %} + + lateral flatten(input => parse_json({{ string }})) + +{% endmacro %} + +{% macro redshift__unnest_json_array(string) %} + + json_array_elements_text('{{ string }}') + +{% endmacro %} + +{% macro postgres__unnest_json_array(string) %} + + jsonb_array_elements({{ string }}::jsonb) + +{% endmacro %} + +{% macro spark__unnest_json_array(string) %} + + explode(from_json({{ string }}, 'array')) + +{% endmacro %} \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index cf6bc158..b3cb0e39 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -1,109 +1,106 @@ with audit_logs as ( - select * + select + _fivetran_synced, + source_id as schedule_id, + created_at, + lower(change_description) as change_description from {{ var('audit_log') }} + where lower(change_description) like '%workweek changed from%' -), audit_logs_cleaned as ( +), audit_logs_enhanced 
as ( select _fivetran_synced, + schedule_id, created_at, - source_id as schedule_id, - change_description as original, - replace( - {{ clean_string('lower(change_description)', - ['workweek changed from', 'amp', 'gt', 'quot', ';', '&', '=', ' ']) }}, - '""', '":"') - as change_description + min(created_at) over (partition by schedule_id) as min_created_at, + replace(replace(replace(replace(change_description, + '&quot;', '"') , + 'amp;', '') , + '=&gt;', ':'), + ' ', '') + as change_description_cleaned from audit_logs - where lower(change_description) like '%workweek changed from%' ), split_to_from as ( + -- 'from' select - _fivetran_synced, - schedule_id, - created_at, + audit_logs_enhanced.*, + cast('1970-01-01' as {{ dbt.type_timestamp() }}) as valid_from, + created_at as valid_to, + {{ dbt.split_part('change_description_cleaned', "'to'", 1) }} as schedule_change, + 'from' as change_type -- remove before release but helpful for debugging + from audit_logs_enhanced + where created_at = min_created_at -- the 'from' portion only matters for the first row + + union all + + -- 'to' + select + audit_logs_enhanced.*, created_at as valid_from, - min(created_at) over (partition by schedule_id) as min_valid_from, coalesce( lead(created_at) over ( partition by schedule_id order by created_at), {{ dbt.current_timestamp_backcompat() }}) as valid_to, - {{ dbt.split_part('change_description', "'to'", 1) }} as from_schedule, - {{ dbt.split_part('change_description', "'to'", 2) }} as to_schedule - from audit_logs_cleaned + {{ dbt.split_part('change_description_cleaned', "'to'", 2) }} as schedule_change, + 'to' as change_type -- remove before release but helpful for debugging + from audit_logs_enhanced ), split_days as ( - {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} + {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} {% for day, day_number in days_of_week.items() %} - select - split_to_from.*, - '{{ day }}' as day_of_week, - cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, - replace({{ zendesk.regex_extract('from_schedule', "'.*?" ~ day ~ ".*?({.*?})'") }}, ',', '},{') as from_schedule_cleaned, - replace({{ zendesk.regex_extract('to_schedule', "'.*?" ~ day ~ ".*?({.*?})'") }}, ',', '},{') as to_schedule_cleaned - from split_to_from + select + split_to_from.*, + '{{ day }}' as day_of_week, + cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, + replace( + {{ dbt.concat([ + '"["', + zendesk.regex_extract('schedule_change', "'.*?"
~ day ~ ".*?({.*?})'"), + '"]"']) }} + , ',', '},{') + as schedule_change_cleaned from split_to_from - {% if not loop.last %}union all{% endif %} {% endfor %} -), schedule_arrays as ( - select - split_days.*, - {{ zendesk.to_json_array(dbt.concat(['"["', 'from_schedule_cleaned', '"]"'])) }} as from_schedule_array, - {{ zendesk.to_json_array(dbt.concat(['"["', 'to_schedule_cleaned', '"]"'])) }} as to_schedule_array - from split_days - -), unnest_schedules as ( +), unnested_schedules as ( -- only want the first "from_schedule" to start off select - schedule_arrays.*, - {{ clean_string('unnested_from_schedule', ['{', '}', '"']) }} as unnested_schedule, - 'from' as schedule_source - from schedule_arrays - cross join unnest(from_schedule_array) as unnested_from_schedule - where valid_from = min_valid_from - - union all - - select - schedule_arrays.*, - {{ clean_string('unnested_to_schedule', ['{', '}', '"']) }} as unnested_schedule, - 'to' as schedule_source - from schedule_arrays - cross join unnest(to_schedule_array) as unnested_to_schedule - where valid_from != min_valid_from + split_days.*, + replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', '') as cleaned_unnested_schedule + from split_days + -- need to update for all warehouses + cross join {{ zendesk.unnest_json_array('schedule_change_cleaned') }} as unnested_schedule ), split_times as ( select schedule_id, - cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, - cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, - cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, - cast(nullif({{ dbt.split_part('unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, day_of_week, day_of_week_number, _fivetran_synced, valid_from, - valid_to, - schedule_source - - from unnest_schedules + valid_to + from unnested_schedules ), final as ( select + _fivetran_synced, schedule_id, + valid_from, + valid_to, start_time_hh * 60 + start_time_mm + day_of_week_number * 24 * 60 as start_time, end_time_hh * 60 + end_time_mm + day_of_week_number * 24 * 60 as end_time, - coalesce(valid_from, '1970-01-01') as valid_from, - valid_to, - _fivetran_synced, day_of_week, - schedule_source - + day_of_week_number from split_times ) From 505f3431d83786c6eb1c4f0fb0c1ac45c2e02e53 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:41:14 -0500 Subject: [PATCH 12/76] update schedule_history --- integration_tests/dbt_project.yml | 2 +- integration_tests/seeds/audit_log_data.csv | 4 +- macros/regex_extract.sql | 19 ++-- macros/unnest_json_array.sql | 35 ------- .../int_zendesk__schedule_history.sql | 93 +++++++++---------- 5 files changed, 57 insertions(+), 96 deletions(-) delete mode 100644 macros/unnest_json_array.sql diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 
66c3d094..8cda59d4 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.17.0' profile: 'integration_tests' vars: - zendesk_schema: zendesk_integration_tests_50 + zendesk_schema: zz_catherine_zendesk_test #zendesk_integration_tests_50 zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" diff --git a/integration_tests/seeds/audit_log_data.csv b/integration_tests/seeds/audit_log_data.csv index a0bef688..a7a636e0 100644 --- a/integration_tests/seeds/audit_log_data.csv +++ b/integration_tests/seeds/audit_log_data.csv @@ -3,4 +3,6 @@ id,_fivetran_synced,action,actor_id,change_description,created_at,source_id,sour 2679952,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}, :mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:29.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek 293556,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:28.000000,267996,Workweek: New schedule here,zendesk/business_hours/workweek 4441364,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:10.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek -70900,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek \ No newline at end of file +70900,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {} to {:mon=&gt;{""09:00""=&gt;""17:00""}, :tue=&gt;{""09:00""=&gt;""17:00""}, :wed=&gt;{""09:00""=&gt;""17:00""}, :thu=&gt;{""09:00""=&gt;""17:00""}, :fri=&gt;{""09:00""=&gt;""17:00""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek +70901,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {"mon":{"10:00":"20:00"},"tue":{"10:00":"20:00"},"wed":{"10:00":"20:00"},"thu":{"10:00":"20:00"},"fri":{"10:00":"20:00"}} to {"mon":{"10:00":"22:00"},"tue":{"10:00":"22:00"},"wed":{"10:00":"22:00"},"thu":{"10:00":"22:00"},"fri":{"10:00":"22:00"}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek +70902,2024-05-28 16:18:58.471000,update,37253,"Workweek changed from {:mon=&gt;{""09:00""=&gt;""10:45"", ""11:45""=&gt;""12:45"", ""13:45""=&gt;""14:45"", ""15:15""=&gt;""16:15"", ""19:00""=&gt;""20:00"", ""17:30""=&gt;""18:30""}, :tue=&gt;{""00:15""=&gt;""13:15"", 
""13:30""=&gt;""18:30"", ""18:45""=&gt;""21:45"", ""22:00""=&gt;""24:00""}, :wed=&gt;{""09:00""=&gt;""21:00""}, :thu=&gt;{""17:00""=&gt;""18:00"", ""19:45""=&gt;""20:45"", ""09:00""=&gt;""10:45"", ""12:15""=&gt;""13:15"", ""14:30""=&gt;""15:30""}, :fri=&gt;{""09:00""=&gt;""12:45"", ""19:15""=&gt;""22:30"", ""14:45""=&gt;""15:45"", ""17:30""=&gt;""18:30""}} to {:mon=&gt;{""09:00""=&gt;""10:45"", ""11:45""=&gt;""12:45"", ""13:45""=&gt;""14:45"", ""15:15""=&gt;""16:15"", ""17:30""=&gt;""18:30"", ""19:00""=&gt;""20:00""}, :tue=&gt;{""00:15""=&gt;""13:15"", ""13:30""=&gt;""18:30"", ""18:45""=&gt;""21:45"", ""22:00""=&gt;""24:00""}, :wed=&gt;{""02:30""=&gt;""21:45""}, :thu=&gt;{""09:00""=&gt;""10:45"", ""12:15""=&gt;""13:15"", ""14:30""=&gt;""15:30"", ""17:00""=&gt;""18:00"", ""19:45""=&gt;""20:45""}, :fri=&gt;{""09:00""=&gt;""12:45"", ""14:45""=&gt;""15:45"", ""17:30""=&gt;""18:30"", ""19:15""=&gt;""22:30""}}",2024-05-21 11:20:09.000000,267996,Workweek: New schedule 2,zendesk/business_hours/workweek \ No newline at end of file diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index 8342370f..bb7f83a6 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -8,34 +8,35 @@ regexp_extract({{ string }}, {{ regex }} ) -{% endmacro %} +{%- endmacro %} {% macro bigquery__regex_extract(string, regex) %} regexp_extract({{ string }}, {{ regex }} ) -{% endmacro %} +{%- endmacro %} {% macro snowflake__regex_extract(string, regex) %} REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 ) -{% endmacro %} +{%- endmacro %} {% macro postgres__regex_extract(string, regex) %} (regexp_matches({{ string }}, {{ regex }}))[1] -{% endmacro %} +{%- endmacro %} {% macro redshift__regex_extract(string, regex) %} - REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e') + {% set reformatted_regex = regex | replace(".*?", ".*") | replace("{", "\\\{") | replace("}", "\\\}") -%} + REGEXP_SUBSTR({{ string }}, {{ reformatted_regex }}, 1, 1, 'e') -{% endmacro %} +{%- endmacro %} {% macro spark__regex_extract(string, regex) %} + {% set reformatted_regex = regex | replace("{", "\\\{") | replace("}", "\\\}") -%} + regexp_extract({{ string }}, {{ reformatted_regex }}, 1) - regexp_extract({{ string }}, {{ regex }}, 1) - -{% endmacro %} \ No newline at end of file +{%- endmacro %} \ No newline at end of file diff --git a/macros/unnest_json_array.sql b/macros/unnest_json_array.sql deleted file mode 100644 index ea4367f4..00000000 --- a/macros/unnest_json_array.sql +++ /dev/null @@ -1,35 +0,0 @@ -{% macro unnest_json_array(string) -%} - -{{ adapter.dispatch('unnest_json_array', 'zendesk') (string) }} - -{%- endmacro %} - -{% macro bigquery__unnest_json_array(string) %} - - unnest(json_extract_array({{ string }}, '$')) - -{% endmacro %} - -{% macro snowflake__unnest_json_array(string) %} - - lateral flatten(input => parse_json({{ string }})) - -{% endmacro %} - -{% macro redshift__unnest_json_array(string) %} - - json_array_elements_text('{{ string }}') - -{% endmacro %} - -{% macro postgres__unnest_json_array(string) %} - - jsonb_array_elements({{ string }}::jsonb) - -{% endmacro %} - -{% macro spark__unnest_json_array(string) %} - - explode(from_json({{ string }}, 'array')) - -{% endmacro %} \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index b3cb0e39..e91f323e 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -13,11 +13,11 @@ 
with audit_logs as ( schedule_id, created_at, min(created_at) over (partition by schedule_id) as min_created_at, - replace(replace(replace(replace(change_description, - '&quot;', '"') , - 'amp;', '') , - '=&gt;', ':'), - ' ', '') + replace(replace(replace(replace(change_description, + 'workweek changed from', ''), + '&quot;', '"'), + 'amp;', ''), + '=&gt;', ':') as change_description_cleaned from audit_logs @@ -27,7 +27,7 @@ with audit_logs as ( audit_logs_enhanced.*, cast('1970-01-01' as {{ dbt.type_timestamp() }}) as valid_from, created_at as valid_to, - {{ dbt.split_part('change_description_cleaned', "'to'", 1) }} as schedule_change, + {{ dbt.split_part('change_description_cleaned', "' to '", 1) }} as schedule_change, 'from' as change_type -- remove before release but helpful for debugging from audit_logs_enhanced where created_at = min_created_at -- the 'from' portion only matters for the first row @@ -43,66 +43,59 @@ with audit_logs as ( partition by schedule_id order by created_at), {{ dbt.current_timestamp_backcompat() }}) as valid_to, - {{ dbt.split_part('change_description_cleaned', "'to'", 2) }} as schedule_change, + {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change, 'to' as change_type -- remove before release but helpful for debugging from audit_logs_enhanced ), split_days as ( {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} {% for day, day_number in days_of_week.items() %} - select - split_to_from.*, - '{{ day }}' as day_of_week, - cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, - replace( - {{ dbt.concat([ - '"["', - zendesk.regex_extract('schedule_change', "'.*?" ~ day ~ ".*?({.*?})'"), - '"]"']) }} - , ',', '},{') - as schedule_change_cleaned - from split_to_from + select + split_to_from.*, + '{{ day }}' as day_of_week, + cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, + {{ zendesk.regex_extract('schedule_change', "'.*?" 
~ day ~ ".*?({.*?})'") }} as day_of_week_schedule + from split_to_from {% if not loop.last %}union all{% endif %} {% endfor %} ), unnested_schedules as ( - -- only want the first "from_schedule" to start off - select + select split_days.*, - replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', '') as cleaned_unnested_schedule + +{%- if target.type == 'bigquery' %} + replace(replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule from split_days - -- need to update for all warehouses - cross join {{ zendesk.unnest_json_array('schedule_change_cleaned') }} as unnested_schedule + cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule -), split_times as ( +{%- elif target.type == 'snowflake' %} + unnested_schedule.key || ':' || unnested_schedule.value as cleaned_unnested_schedule + from split_days + cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{'))) as unnested_schedule - select - schedule_id, - cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, - cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, - cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, - cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm, - day_of_week, - day_of_week_number, - _fivetran_synced, - valid_from, - valid_to - from unnested_schedules -), +{%- elif target.type == 'postgres' %} + replace(replace(replace(replace(unnested_schedule::text, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule + from split_days + cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule -final as ( +{%- elif target.type in ('databricks', 'spark') %} + replace(replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule + from split_days + lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule - select - _fivetran_synced, - schedule_id, - valid_from, - valid_to, - start_time_hh * 60 + start_time_mm + day_of_week_number * 24 * 60 as start_time, - end_time_hh * 60 + end_time_mm + day_of_week_number * 24 * 60 as end_time, - day_of_week, - day_of_week_number - from split_times +{%- elif target.type == 'redshift' %} + {# json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule + from split_days #} + {# cross join lateral json_parse(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{')) as element #} + + cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule + from split_days + +{% else %} + cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule + from split_days +{%- endif %} ) select * -from final \ No newline at end of file +from unnested_schedules \ No newline at end of file From 4ca4099a1158054408c98986051eba2dda3690ff Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Mon, 23 Sep 2024 18:43:54 -0500 Subject: [PATCH 13/76] complete schedule_history --- integration_tests/dbt_project.yml | 2 +- .../int_zendesk__schedule_history.sql | 27 
+++++++++++++++++-- 2 files changed, 26 insertions(+), 3 deletions(-) diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 8cda59d4..66c3d094 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.17.0' profile: 'integration_tests' vars: - zendesk_schema: zz_catherine_zendesk_test #zendesk_integration_tests_50 + zendesk_schema: zendesk_integration_tests_50 zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index e91f323e..6b7f14e7 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -22,7 +22,7 @@ with audit_logs as ( from audit_logs ), split_to_from as ( - -- 'from' + -- 'from' establishes the schedule from before the change occurred select audit_logs_enhanced.*, cast('1970-01-01' as {{ dbt.type_timestamp() }}) as valid_from, @@ -95,7 +95,30 @@ with audit_logs as ( cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule from split_days {%- endif %} + +), split_times as ( + + select + unnested_schedules.*, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, + cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm + from unnested_schedules + +), final as ( + + select + _fivetran_synced, + schedule_id, + start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time, + end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, + valid_from, + valid_to, + day_of_week, + day_of_week_number + from split_times ) select * -from unnested_schedules \ No newline at end of file +from final \ No newline at end of file From 7806d83edbd5b3640497280a8872f76407f29ce4 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 25 Sep 2024 21:04:44 -0500 Subject: [PATCH 14/76] revise holidays --- .../int_zendesk__schedule_history.sql | 19 +- .../int_zendesk__schedule_spine.sql | 440 ++++++------------ .../int_zendesk__timezone_daylight.sql} | 19 +- 3 files changed, 166 insertions(+), 312 deletions(-) rename models/{intermediate => history}/int_zendesk__schedule_history.sql (90%) rename models/{intermediate/int_zendesk__timezones_w_dt.sql => utils/int_zendesk__timezone_daylight.sql} (87%) diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql similarity index 90% rename from models/intermediate/int_zendesk__schedule_history.sql rename to models/history/int_zendesk__schedule_history.sql index 6b7f14e7..a8178ed7 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -1,4 +1,9 @@ -with audit_logs as ( +with schedule as ( + + select * + from {{ var('schedule') }} + +,) audit_logs as ( select _fivetran_synced, source_id as schedule_id, @@ -26,7 +31,7 @@ with audit_logs as ( select audit_logs_enhanced.*, cast('1970-01-01' as {{ dbt.type_timestamp() }}) as 
valid_from, - created_at as valid_to, + created_at as valid_until, {{ dbt.split_part('change_description_cleaned', "' to '", 1) }} as schedule_change, 'from' as change_type -- remove before release but helpful for debugging from audit_logs_enhanced @@ -42,7 +47,7 @@ with audit_logs as ( lead(created_at) over ( partition by schedule_id order by created_at), {{ dbt.current_timestamp_backcompat() }}) - as valid_to, + as valid_until, {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change, 'to' as change_type -- remove before release but helpful for debugging from audit_logs_enhanced @@ -64,7 +69,7 @@ with audit_logs as ( split_days.*, {%- if target.type == 'bigquery' %} - replace(replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule + {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule from split_days cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule @@ -74,12 +79,12 @@ with audit_logs as ( cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{'))) as unnested_schedule {%- elif target.type == 'postgres' %} - replace(replace(replace(replace(unnested_schedule::text, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule + {{ clean_schedule('unnested_schedule::text') }} as cleaned_unnested_schedule from split_days cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule {%- elif target.type in ('databricks', 'spark') %} - replace(replace(replace(replace(unnested_schedule, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule + {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule from split_days lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule @@ -114,7 +119,7 @@ with audit_logs as ( start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time, end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, valid_from, - valid_to, + valid_until, day_of_week, day_of_week_number from split_times diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index de197c74..27f2d54a 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -1,124 +1,57 @@ {{ config(enabled=var('using_schedules', True)) }} +{{ config(enabled=var('using_schedules', True)) }} + /* The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. 
End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) */ -with timezone as ( +with schedule as ( select * - from {{ var('time_zone') }} + from {{ var('schedule') }} -), daylight_time as ( +), holiday as ( select * - from {{ var('daylight_time') }} + from {{ var('schedule_holiday') }} --- TODO: BRING SCHEDULE HISTORY INTO THIS MODEL LIKELY -), schedule as ( +), calendar_spine as ( select * - from {{ var('schedule') }} + from {{ ref('int_zendesk__calendar_spine') }} + +), split_timezones as ( + + select * + from {{ ref('int_zendesk__timezone_daylight') }} -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. ), schedule_holiday as ( select - _fivetran_synced, - cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. - cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. - holiday_id, - holiday_name, - schedule_id - - from {{ var('schedule_holiday') }} - inner join {{ ref('int_zendesk__calendar_spine') }} + holiday._fivetran_synced, + {# cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. + cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. #} + cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. + holiday.holiday_id, + holiday.holiday_name, + holiday.schedule_id + + from holiday + inner join calendar_spine on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} ) and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} ) -), timezone_with_dt as ( - - select - timezone.*, - daylight_time.daylight_start_utc, - daylight_time.daylight_end_utc, - daylight_time.daylight_offset_minutes - - from timezone - left join daylight_time - on timezone.time_zone = daylight_time.time_zone - -), order_timezone_dt as ( - - select - *, - -- will be null for timezones without any daylight savings records (and the first entry) - -- we will coalesce the first entry date with .... 
the X years ago - lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc, - -- will be null for timezones without any daylight savings records (and the last entry) - -- we will coalesce the last entry date with the current date - lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc - - from timezone_with_dt - -), split_timezones as ( - - -- standard schedule (includes timezones without DT) - -- starts: when the last Daylight Savings ended - -- ends: when the next Daylight Savings starts - select - time_zone, - standard_offset_minutes as offset_minutes, - - -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT - coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from, - - -- daylight_start_utc is null for timezones that don't use DT - coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until - - from order_timezone_dt - - union all - - -- DT schedule (excludes timezones without it) - -- starts: when this Daylight Savings started - -- ends: when this Daylight Savings ends - select - time_zone, - -- Pacific Time is -8h during standard time and -7h during DT - standard_offset_minutes + daylight_offset_minutes as offset_minutes, - daylight_start_utc as valid_from, - daylight_end_utc as valid_until - - from order_timezone_dt - where daylight_offset_minutes is not null - - union all - - select - time_zone, - standard_offset_minutes as offset_minutes, - - -- Get the latest daylight_end_utc time and set that as the valid_from - max(daylight_end_utc) as valid_from, - - -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future. - cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until - - from order_timezone_dt - group by 1, 2 - -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979. 
- having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date) - ), calculate_schedules as ( select schedule.schedule_id, - schedule.time_zone, + lower(schedule.time_zone) as time_zone, schedule.start_time, schedule.end_time, - schedule.created_at, + {# schedule.created_at, #} schedule.schedule_name, schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, @@ -129,237 +62,152 @@ with timezone as ( from schedule left join split_timezones - on split_timezones.time_zone = schedule.time_zone - --- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules -), holiday_start_end_times as ( + on split_timezones.time_zone = lower(schedule.time_zone) - select - calculate_schedules.*, +), join_holidays as ( + select + calculate_schedules.schedule_id, + calculate_schedules.time_zone, + calculate_schedules.start_time_utc, + calculate_schedules.end_time_utc, + calculate_schedules.schedule_name, + schedule_holiday.holiday_date, schedule_holiday.holiday_name, - schedule_holiday.holiday_start_date_at, - cast({{ dbt.dateadd("second", "86400", "schedule_holiday.holiday_end_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_end_date_at, -- add 24*60*60 seconds - cast({{ dbt_date.week_start("schedule_holiday.holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_week_start, - cast({{ dbt_date.week_end("schedule_holiday.holiday_end_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_week_end - from schedule_holiday - inner join calculate_schedules - on calculate_schedules.schedule_id = schedule_holiday.schedule_id - and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from - and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until - --- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules) -), holiday_minutes as( - - select - holiday_start_end_times.*, - {{ dbt.datediff("holiday_week_start", "holiday_start_date_at", "minute") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start, - {{ dbt.datediff("holiday_week_start", "holiday_end_date_at", "minute") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end - from holiday_start_end_times - left join timezone - on timezone.time_zone = holiday_start_end_times.time_zone - --- Determine which schedule days include a holiday -), holiday_check as ( + calculate_schedules.valid_from as schedule_valid_from, + calculate_schedules.valid_until as schedule_valid_until + from calculate_schedules + left join schedule_holiday + on schedule_holiday.schedule_id = calculate_schedules.schedule_id + and schedule_holiday.holiday_date <= calculate_schedules.valid_until + and schedule_holiday.holiday_date >= calculate_schedules.valid_from +), holiday_neighbors as( select - *, - case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc - then holiday_name - end as holiday_name_check - from holiday_minutes - --- Consolidate the holiday records that were just created -), holiday_consolidated as ( - - select - schedule_id, - time_zone, - schedule_name, - valid_from, - valid_until, - start_time_utc, - end_time_utc, - holiday_week_start, - cast({{ dbt.dateadd("second", "86400", "holiday_week_end") }} as {{ dbt.type_timestamp() }}) as holiday_week_end, - 
max(holiday_name_check) as holiday_name_check - from holiday_check - {{ dbt_utils.group_by(n=9) }} - --- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine -), spine_union as ( - + schedule_id, + time_zone, + start_time_utc, + end_time_utc, + schedule_name, + holiday_name, + holiday_date, + schedule_valid_from, + schedule_valid_until, + lag(holiday_date) over (partition by schedule_id, start_time_utc order by holiday_date) as prior_holiday, + lead(holiday_date) over (partition by schedule_id, start_time_utc order by holiday_date) as next_holiday + from join_holidays + +), split_holidays as( select - schedule_id, - time_zone, - schedule_name, - valid_from, - valid_until, - start_time_utc, - end_time_utc, - holiday_week_start, - holiday_week_end, - holiday_name_check - from holiday_consolidated + schedule_id, + time_zone, + start_time_utc, + end_time_utc, + schedule_name, + schedule_valid_from, + schedule_valid_until, + holiday_name, + holiday_date, + case + when (date_diff(holiday_date, prior_holiday, day) > 1 + or prior_holiday is null) + then 'start' + end as holiday_start_or_end, + schedule_valid_from as valid_from, + holiday_date as valid_until + from holiday_neighbors + where holiday_date is not null union all select - schedule_id, - time_zone, - schedule_name, - valid_from, - valid_until, - start_time_utc, - end_time_utc, - null as holiday_week_start, - null as holiday_week_end, - null as holiday_name_check - from calculate_schedules - --- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks -), all_periods as ( - - select distinct schedule_id, - holiday_week_start as period_start, - holiday_week_end as period_end, + time_zone, start_time_utc, end_time_utc, - holiday_name_check, - true as is_holiday_week - from spine_union - where holiday_week_start is not null - and holiday_week_end is not null + schedule_name, + schedule_valid_from, + schedule_valid_until, + holiday_name, + holiday_date, + case + when (date_diff(next_holiday, holiday_date, day) > 1 + or next_holiday is null) + then 'end' + end as holiday_start_or_end, + holiday_date as valid_from, + schedule_valid_until as valid_until, + from holiday_neighbors + where holiday_date is not null union all - select distinct + select schedule_id, - valid_from as period_start, - valid_until as period_end, + time_zone, start_time_utc, end_time_utc, - cast(null as {{ dbt.type_string() }}) as holiday_name_check, - false as is_holiday_week - from spine_union - --- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules -), sorted_periods as ( - - select distinct - *, - lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end, - lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start - from all_periods - --- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation -), non_holiday_period_adjustments as ( + schedule_name, + schedule_valid_from, + schedule_valid_until, + holiday_name, + holiday_date, + cast(null as {{ dbt.type_string() }}) as holiday_start_or_end, + schedule_valid_from as valid_from, + schedule_valid_until as valid_until + from holiday_neighbors + where holiday_date is null + +), valid_from_partition as( + select + * + , row_number() over (partition by schedule_id, 
start_time_utc, schedule_valid_from order by holiday_date) as valid_from_index + from split_holidays + where not (holiday_date is not null and holiday_start_or_end is null) +), adjust_ranges as( select - schedule_id, - period_start, - period_end, - prev_end, - next_start, - -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in - coalesce(greatest(case - when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following) - else period_start - end, period_start), period_start) as valid_from, - coalesce(case - when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following) - else period_end - end, period_end) as valid_until, + schedule_id, + time_zone, start_time_utc, end_time_utc, - holiday_name_check, - is_holiday_week - from sorted_periods - --- A few window function results will be leveraged downstream. Let's generate them now. -), gap_starter as ( - select - *, - max(period_end) over (partition by schedule_id) as max_valid_until, - last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start, - first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end - from non_holiday_period_adjustments - --- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are -), gap_adjustments as( + schedule_name, + holiday_name, + holiday_date, + holiday_start_or_end, - select - *, - -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. If we add two hours to the previous valid_until it should always be greater than the current valid_from. - -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled. 
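-- note: a concrete trace of the gap test below, under assumed values: if one
-- schedule block ends at valid_until = 2023-09-03 00:00 and the next block's
-- lead_next_start is 2023-09-10 00:00, then valid_until plus two hours is still
-- earlier than lead_next_start, so the row is flagged 'gap' and a filler
-- schedule is unioned in further down.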
case - when cast({{ dbt.dateadd("hour", "2", "valid_until") }} as {{ dbt.type_timestamp() }}) < cast(lead_next_start as {{ dbt.type_timestamp() }}) - then 'gap' - when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until) - then 'gap' - else null - end as is_schedule_gap + when holiday_start_or_end = 'start' + then case when valid_from_index > 1 + then lag(holiday_date) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) + else schedule_valid_from + end + when holiday_start_or_end = 'end' + then cast({{ dbt.dateadd(datepart="day", interval=1, from_date_or_timestamp="holiday_date") }} as {{ dbt.type_timestamp() }}) + else cast(schedule_valid_from as {{ dbt.type_timestamp() }}) + end as valid_from, - from gap_starter - --- We know where the gaps are, so now lets prime the data to fill those gaps -), schedule_spine_primer as ( - - select - schedule_id, - valid_from, - valid_until, - start_time_utc, - end_time_utc, - lead_next_start, - max_valid_until, - holiday_name_check, - is_holiday_week, - max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period, - lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer - from gap_adjustments - --- We know the gaps and where they are, so let's fill them with the following union -), final_union as ( - - -- For all gap periods, let's properly create a schedule filled before the holiday. - select - schedule_id, - valid_until as valid_from, - coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until, - start_time_utc, - end_time_utc, - cast(null as {{ dbt.type_string() }}) as holiday_name_check, - false as is_holiday_week - from schedule_spine_primer - where is_gap_period is not null - - union all - - -- Fill all other normal schedules. 
- select - schedule_id, - valid_from, - valid_until, - start_time_utc, - end_time_utc, - holiday_name_check, - is_holiday_week - from schedule_spine_primer - --- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays -), final as( + case + when holiday_start_or_end = 'start' + then holiday_date + when holiday_start_or_end = 'end' + then case when valid_from_index > 1 + then coalesce( + lead(holiday_date) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index), + schedule_valid_until) + else schedule_valid_until + end + else schedule_valid_until + end as valid_until, + + valid_from_index + from valid_from_partition + where not (valid_from_index > 1 and holiday_start_or_end = 'start') +) select - schedule_id, + schedule_id, valid_from, valid_until, start_time_utc, - end_time_utc, - is_holiday_week - from final_union - where holiday_name_check is null -) - -select * -from final \ No newline at end of file + end_time_utc + from adjust_ranges \ No newline at end of file diff --git a/models/intermediate/int_zendesk__timezones_w_dt.sql b/models/utils/int_zendesk__timezone_daylight.sql similarity index 87% rename from models/intermediate/int_zendesk__timezones_w_dt.sql rename to models/utils/int_zendesk__timezone_daylight.sql index a8cc91c6..f3974c1a 100644 --- a/models/intermediate/int_zendesk__timezones_w_dt.sql +++ b/models/utils/int_zendesk__timezone_daylight.sql @@ -1,10 +1,3 @@ -{{ config(enabled=var('using_schedules', True)) }} - -/* - The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. - End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) -*/ - with timezone as ( select * @@ -88,7 +81,15 @@ with timezone as ( group by 1, 2 -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979. 
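-- note: a worked trace of this back-fill branch, under assumed values: Hong
-- Kong's daylight records stop at max(daylight_end_utc) = 1979-10-21, the
-- having clause below confirms that date is in the past, and the branch then
-- emits one standard-offset row valid from 1979-10-21 until a year past the
-- current timestamp, so the zone is never left without a validity window.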
having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date) + +), final as ( + select + lower(time_zone) as time_zone, + offset_minutes, + cast(valid_from as {{ dbt.type_timestamp() }}) as valid_from, + cast(valid_until as {{ dbt.type_timestamp() }}) as valid_until + from split_timezones ) -select * -from split_timezones \ No newline at end of file +select * +from final \ No newline at end of file From a54bee27f07542b8cc993f9bbf227d52ba4dd9d9 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 25 Sep 2024 21:09:51 -0500 Subject: [PATCH 15/76] add macro --- macros/clean_schedule.sql | 3 +++ models/intermediate/int_zendesk__schedule_spine.sql | 2 -- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 macros/clean_schedule.sql diff --git a/macros/clean_schedule.sql b/macros/clean_schedule.sql new file mode 100644 index 00000000..17d54ecb --- /dev/null +++ b/macros/clean_schedule.sql @@ -0,0 +1,3 @@ +{% macro clean_schedule(column_name) -%} + replace(replace(replace(replace({{ column_name }}, '{', ''), '}', ''), '"', ''), ' ', '') +{%- endmacro %} \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 27f2d54a..b5fdbe4b 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -1,7 +1,5 @@ {{ config(enabled=var('using_schedules', True)) }} -{{ config(enabled=var('using_schedules', True)) }} - /* The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) From 047754365df5091435ddd33cc685dab367ed2078 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 25 Sep 2024 21:11:43 -0500 Subject: [PATCH 16/76] typeo --- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index b5fdbe4b..167f32ba 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -133,7 +133,7 @@ with schedule as ( then 'end' end as holiday_start_or_end, holiday_date as valid_from, - schedule_valid_until as valid_until, + schedule_valid_until as valid_until from holiday_neighbors where holiday_date is not null From f980433ebc0fb823a10a259769d3914f0f72764c Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 17:37:42 -0500 Subject: [PATCH 17/76] updates --- .../history/int_zendesk__schedule_history.sql | 2 +- .../int_zendesk__schedule_spine.sql | 159 ++++++++++++------ 2 files changed, 105 insertions(+), 56 deletions(-) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index a8178ed7..ca5a116b 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -9,7 +9,7 @@ with schedule as ( source_id as schedule_id, created_at, lower(change_description) as change_description - from {{ 
var('audit_log') }} + from {{ var('audit_log') }} where lower(change_description) like '%workweek changed from%' ), audit_logs_enhanced as ( diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 167f32ba..110d4a9b 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -30,9 +30,9 @@ with schedule as ( select holiday._fivetran_synced, - {# cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. - cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. #} - cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week. + cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, + cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, -- The valid_until will then be the the day after. + cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, holiday.holiday_id, holiday.holiday_name, holiday.schedule_id @@ -71,6 +71,8 @@ with schedule as ( calculate_schedules.schedule_name, schedule_holiday.holiday_date, schedule_holiday.holiday_name, + schedule_holiday.holiday_valid_from, + schedule_holiday.holiday_valid_until, calculate_schedules.valid_from as schedule_valid_from, calculate_schedules.valid_until as schedule_valid_until from calculate_schedules @@ -79,21 +81,6 @@ with schedule as ( and schedule_holiday.holiday_date <= calculate_schedules.valid_until and schedule_holiday.holiday_date >= calculate_schedules.valid_from -), holiday_neighbors as( - select - schedule_id, - time_zone, - start_time_utc, - end_time_utc, - schedule_name, - holiday_name, - holiday_date, - schedule_valid_from, - schedule_valid_until, - lag(holiday_date) over (partition by schedule_id, start_time_utc order by holiday_date) as prior_holiday, - lead(holiday_date) over (partition by schedule_id, start_time_utc order by holiday_date) as next_holiday - from join_holidays - ), split_holidays as( select schedule_id, @@ -105,14 +92,15 @@ with schedule as ( schedule_valid_until, holiday_name, holiday_date, + holiday_valid_from, + holiday_valid_until, case - when (date_diff(holiday_date, prior_holiday, day) > 1 - or prior_holiday is null) - then 'start' + when holiday_valid_from = holiday_date + then '0' end as holiday_start_or_end, schedule_valid_from as valid_from, holiday_date as valid_until - from holiday_neighbors + from join_holidays where holiday_date is not null union all @@ -127,14 +115,15 @@ with schedule as ( schedule_valid_until, holiday_name, holiday_date, + holiday_valid_from, + holiday_valid_until, case - when (date_diff(next_holiday, holiday_date, day) > 1 - or next_holiday is null) - then 'end' + when holiday_valid_until = holiday_date + then '1' end as holiday_start_or_end, 
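-- note: the string flags above read as a sorting trick: '0' marks the day a
-- holiday range opens (holiday_valid_from = holiday_date) and '1' the day it
-- closes, so the order by holiday_date, holiday_start_or_end in
-- valid_from_partition below sequences an open before a close that lands on
-- the same date. A minimal sketch of the idea, against a hypothetical
-- boundaries relation:
--
--     select holiday_date,
--         case when holiday_valid_from  = holiday_date then '0'  -- range opens
--              when holiday_valid_until = holiday_date then '1'  -- range closes
--         end as boundary_flag
--     from boundaries
--     order by holiday_date, boundary_flag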
holiday_date as valid_from, - schedule_valid_until as valid_until - from holiday_neighbors + schedule_valid_until as valid_until, + from join_holidays where holiday_date is not null union all @@ -149,19 +138,68 @@ with schedule as ( schedule_valid_until, holiday_name, holiday_date, + holiday_valid_from, + holiday_valid_until, cast(null as {{ dbt.type_string() }}) as holiday_start_or_end, schedule_valid_from as valid_from, schedule_valid_until as valid_until - from holiday_neighbors + from join_holidays where holiday_date is null ), valid_from_partition as( select * - , row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date) as valid_from_index + , row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index + , count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index from split_holidays where not (holiday_date is not null and holiday_start_or_end is null) +), add_end_row as( + select + schedule_id, + time_zone, + start_time_utc, + end_time_utc, + schedule_name, + schedule_valid_from, + schedule_valid_until, + holiday_name, + holiday_date, + holiday_valid_from, + holiday_valid_until, + case when valid_from_index = 1 and holiday_start_or_end is not null + then 'partition_start' + else holiday_start_or_end + end as holiday_start_or_end, + valid_from, + valid_until, + valid_from_index, + max_valid_from_index + from valid_from_partition + + union all + + select + schedule_id, + time_zone, + start_time_utc, + end_time_utc, + schedule_name, + schedule_valid_from, + schedule_valid_until, + holiday_name, + holiday_date, + holiday_valid_from, + holiday_valid_until, + 'partition_end' as holiday_start_or_end, + valid_from, + valid_until, + max_valid_from_index + 1 as valid_from_index, + max_valid_from_index + from valid_from_partition + where max_valid_from_index > 1 + and valid_from_index = max_valid_from_index + ), adjust_ranges as( select schedule_id, @@ -171,41 +209,52 @@ with schedule as ( schedule_name, holiday_name, holiday_date, - holiday_start_or_end, + holiday_valid_from, + holiday_valid_until, - case - when holiday_start_or_end = 'start' - then case when valid_from_index > 1 - then lag(holiday_date) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) - else schedule_valid_from - end - when holiday_start_or_end = 'end' - then cast({{ dbt.dateadd(datepart="day", interval=1, from_date_or_timestamp="holiday_date") }} as {{ dbt.type_timestamp() }}) - else cast(schedule_valid_from as {{ dbt.type_timestamp() }}) + case + when holiday_start_or_end = 'partition_start' + then cast({{ dbt.date_trunc("week", "schedule_valid_from") }} as {{ dbt.type_timestamp() }}) + + + else cast(lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) + as {{ dbt.type_timestamp() }}) end as valid_from, case - when holiday_start_or_end = 'start' - then holiday_date - when holiday_start_or_end = 'end' - then case when valid_from_index > 1 - then coalesce( - lead(holiday_date) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index), - schedule_valid_until) - else schedule_valid_until - end - else schedule_valid_until + when holiday_start_or_end = 'partition_start' + then cast({{ dbt.date_trunc("week", "holiday_valid_from") }} as {{ dbt.type_timestamp() }}) + when holiday_start_or_end = '0' + then cast({{ 
dbt.dateadd("week", 1, dbt.date_trunc("week", + "lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" + )) }} as {{ dbt.type_timestamp() }}) + when holiday_start_or_end = '1' + then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "holiday_valid_until")) }} as {{ dbt.type_timestamp() }}) + {# then lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) #} + when holiday_start_or_end = 'partition_end' + then cast({{ dbt.date_trunc("week", "schedule_valid_until") }} as {{ dbt.type_timestamp() }}) + else cast({{ dbt.date_trunc("week", "schedule_valid_until") }} as {{ dbt.type_timestamp() }}) end as valid_until, - valid_from_index - from valid_from_partition - where not (valid_from_index > 1 and holiday_start_or_end = 'start') -) + valid_from_index, + max_valid_from_index, + holiday_start_or_end + from add_end_row + where holiday_start_or_end != '0' or holiday_start_or_end is null + {# where not (valid_from_index > 1 and holiday_start_or_end = '0') #} +), final as( select schedule_id, valid_from, valid_until, start_time_utc, - end_time_utc - from adjust_ranges \ No newline at end of file + end_time_utc, + holiday_name + from adjust_ranges + +) + +select * +from adjust_ranges +{# where holiday_start_or_end != '0' or holiday_start_or_end is null #} \ No newline at end of file From 016e4492408be6bd9671c3f84091b9fdc7af79d0 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 17:42:32 -0500 Subject: [PATCH 18/76] updates --- .../int_zendesk__schedule_spine.sql | 37 ++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 110d4a9b..7e8c3705 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -33,6 +33,7 @@ with schedule as ( cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, -- The valid_until will then be the the day after. 
cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, + {{ dbt.date_trunc("holiday.holiday_start_date_at")}} holiday.holiday_id, holiday.holiday_name, holiday.schedule_id @@ -96,7 +97,7 @@ with schedule as ( holiday_valid_until, case when holiday_valid_from = holiday_date - then '0' + then '0_start' end as holiday_start_or_end, schedule_valid_from as valid_from, holiday_date as valid_until @@ -119,7 +120,7 @@ with schedule as ( holiday_valid_until, case when holiday_valid_until = holiday_date - then '1' + then '1_end' end as holiday_start_or_end, holiday_date as valid_from, schedule_valid_until as valid_until, @@ -215,20 +216,30 @@ with schedule as ( case when holiday_start_or_end = 'partition_start' then cast({{ dbt.date_trunc("week", "schedule_valid_from") }} as {{ dbt.type_timestamp() }}) - - - else cast(lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) - as {{ dbt.type_timestamp() }}) + when holiday_start_or_end = '0_start' + then cast({{ dbt.date_trunc("week", + "lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" + ) }} as {{ dbt.type_timestamp() }}) + when holiday_start_or_end = '1_end' + then cast({{ dbt.date_trunc("week", + "lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" + ) }} as {{ dbt.type_timestamp() }}) + {# then cast({{ dbt.date_trunc("week", "holiday_valid_from") }} as {{ dbt.type_timestamp() }}) #} + {# then cast(lag(valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) + as {{ dbt.type_timestamp() }}) #} + when holiday_start_or_end = 'partition_end' + then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "holiday_valid_until")) }} as {{ dbt.type_timestamp() }}) + else cast({{ dbt.date_trunc("week", "schedule_valid_from") }} as {{ dbt.type_timestamp() }}) end as valid_from, case when holiday_start_or_end = 'partition_start' then cast({{ dbt.date_trunc("week", "holiday_valid_from") }} as {{ dbt.type_timestamp() }}) - when holiday_start_or_end = '0' + when holiday_start_or_end = '0_start' then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" )) }} as {{ dbt.type_timestamp() }}) - when holiday_start_or_end = '1' + when holiday_start_or_end = '1_end' then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "holiday_valid_until")) }} as {{ dbt.type_timestamp() }}) {# then lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) #} when holiday_start_or_end = 'partition_end' @@ -240,10 +251,10 @@ with schedule as ( max_valid_from_index, holiday_start_or_end from add_end_row - where holiday_start_or_end != '0' or holiday_start_or_end is null - {# where not (valid_from_index > 1 and holiday_start_or_end = '0') #} + where holiday_start_or_end != '0_start' or holiday_start_or_end is null + {# where not (valid_from_index > 1 and holiday_start_or_end = '0_start') #} -), final as( +{# ), final as( select schedule_id, valid_from, @@ -251,10 +262,10 @@ with schedule as ( start_time_utc, end_time_utc, holiday_name - from adjust_ranges + from adjust_ranges #} ) select * from adjust_ranges -{# where holiday_start_or_end != '0' or holiday_start_or_end is null #} \ No newline at end of file +{# where 
holiday_start_or_end != '0_start' or holiday_start_or_end is null #} \ No newline at end of file From 45fbcfe79903ed0ce3e199c0d03998475ae496ef Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:07:57 -0500 Subject: [PATCH 19/76] updates --- .../int_zendesk__schedule_spine.sql | 97 ++++++++----------- 1 file changed, 39 insertions(+), 58 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 7e8c3705..2d47d9ef 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -33,7 +33,10 @@ with schedule as ( cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, -- The valid_until will then be the the day after. cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, - {{ dbt.date_trunc("holiday.holiday_start_date_at")}} + cast({{ dbt.date_trunc("week", "holiday.holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.dateadd("week", 1, dbt.date_trunc( + "week", "holiday.holiday_end_date_at") + ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, holiday.holiday_id, holiday.holiday_name, holiday.schedule_id @@ -74,8 +77,12 @@ with schedule as ( schedule_holiday.holiday_name, schedule_holiday.holiday_valid_from, schedule_holiday.holiday_valid_until, + schedule_holiday.holiday_starting_sunday, + schedule_holiday.holiday_ending_sunday, calculate_schedules.valid_from as schedule_valid_from, - calculate_schedules.valid_until as schedule_valid_until + calculate_schedules.valid_until as schedule_valid_until, + cast({{ dbt.date_trunc("week", "calculate_schedules.valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt.date_trunc("week", "calculate_schedules.valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday from calculate_schedules left join schedule_holiday on schedule_holiday.schedule_id = calculate_schedules.schedule_id @@ -84,17 +91,7 @@ with schedule as ( ), split_holidays as( select - schedule_id, - time_zone, - start_time_utc, - end_time_utc, - schedule_name, - schedule_valid_from, - schedule_valid_until, - holiday_name, - holiday_date, - holiday_valid_from, - holiday_valid_until, + join_holidays.*, case when holiday_valid_from = holiday_date then '0_start' @@ -107,17 +104,7 @@ with schedule as ( union all select - schedule_id, - time_zone, - start_time_utc, - end_time_utc, - schedule_name, - schedule_valid_from, - schedule_valid_until, - holiday_name, - holiday_date, - holiday_valid_from, - holiday_valid_until, + join_holidays.*, case when holiday_valid_until = holiday_date then '1_end' @@ -130,17 +117,7 @@ with schedule as ( union all select - schedule_id, - time_zone, - start_time_utc, - end_time_utc, - schedule_name, - schedule_valid_from, - schedule_valid_until, - holiday_name, - holiday_date, - holiday_valid_from, - holiday_valid_until, + join_holidays.*, cast(null as {{ dbt.type_string() }}) as holiday_start_or_end, schedule_valid_from as valid_from, schedule_valid_until as valid_until @@ -149,7 +126,7 @@ with schedule as ( ), valid_from_partition as( select - * + split_holidays.* , row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as 
valid_from_index , count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index from split_holidays @@ -164,10 +141,14 @@ with schedule as ( schedule_name, schedule_valid_from, schedule_valid_until, + schedule_starting_sunday, + schedule_ending_sunday, holiday_name, holiday_date, holiday_valid_from, holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, case when valid_from_index = 1 and holiday_start_or_end is not null then 'partition_start' else holiday_start_or_end @@ -188,10 +169,14 @@ with schedule as ( schedule_name, schedule_valid_from, schedule_valid_until, + schedule_starting_sunday, + schedule_ending_sunday, holiday_name, holiday_date, holiday_valid_from, holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, 'partition_end' as holiday_start_or_end, valid_from, valid_until, @@ -212,39 +197,35 @@ with schedule as ( holiday_date, holiday_valid_from, holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, + schedule_valid_from, + schedule_valid_until, + schedule_starting_sunday, + schedule_ending_sunday, case when holiday_start_or_end = 'partition_start' - then cast({{ dbt.date_trunc("week", "schedule_valid_from") }} as {{ dbt.type_timestamp() }}) - when holiday_start_or_end = '0_start' - then cast({{ dbt.date_trunc("week", - "lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" - ) }} as {{ dbt.type_timestamp() }}) + then schedule_starting_sunday + {# when holiday_start_or_end = '0_start' + then holiday_starting_sunday #} when holiday_start_or_end = '1_end' - then cast({{ dbt.date_trunc("week", - "lag(holiday_valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" - ) }} as {{ dbt.type_timestamp() }}) - {# then cast({{ dbt.date_trunc("week", "holiday_valid_from") }} as {{ dbt.type_timestamp() }}) #} - {# then cast(lag(valid_until) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) - as {{ dbt.type_timestamp() }}) #} + then holiday_starting_sunday when holiday_start_or_end = 'partition_end' - then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "holiday_valid_until")) }} as {{ dbt.type_timestamp() }}) - else cast({{ dbt.date_trunc("week", "schedule_valid_from") }} as {{ dbt.type_timestamp() }}) + then holiday_ending_sunday + else schedule_starting_sunday end as valid_from, case when holiday_start_or_end = 'partition_start' - then cast({{ dbt.date_trunc("week", "holiday_valid_from") }} as {{ dbt.type_timestamp() }}) - when holiday_start_or_end = '0_start' - then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", - "lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)" - )) }} as {{ dbt.type_timestamp() }}) + then holiday_starting_sunday + {# when holiday_start_or_end = '0_start' + then holiday_ending_sunday #} when holiday_start_or_end = '1_end' - then cast({{ dbt.dateadd("week", 1, dbt.date_trunc("week", "holiday_valid_until")) }} as {{ dbt.type_timestamp() }}) - {# then lead(holiday_valid_from) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) #} + then holiday_ending_sunday when holiday_start_or_end = 'partition_end' - then cast({{ dbt.date_trunc("week", "schedule_valid_until") }} as {{ dbt.type_timestamp() }}) - else cast({{ dbt.date_trunc("week", "schedule_valid_until") }} as {{ dbt.type_timestamp() }}) + 
then schedule_ending_sunday + else schedule_ending_sunday end as valid_until, valid_from_index, From 3907813f67a385124cfa5c02683964c527815199 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 18:54:42 -0500 Subject: [PATCH 20/76] updates --- .../int_zendesk__schedule_spine.sql | 43 ++++++++++--------- 1 file changed, 22 insertions(+), 21 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 2d47d9ef..612b6879 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -203,50 +203,51 @@ with schedule as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, - + valid_from_index, + max_valid_from_index, + holiday_start_or_end, case when holiday_start_or_end = 'partition_start' then schedule_starting_sunday - {# when holiday_start_or_end = '0_start' - then holiday_starting_sunday #} + when holiday_start_or_end = '0_start' + then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) when holiday_start_or_end = '1_end' then holiday_starting_sunday when holiday_start_or_end = 'partition_end' then holiday_ending_sunday else schedule_starting_sunday - end as valid_from, - + end as valid_from + , case when holiday_start_or_end = 'partition_start' then holiday_starting_sunday - {# when holiday_start_or_end = '0_start' - then holiday_ending_sunday #} + when holiday_start_or_end = '0_start' + then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) when holiday_start_or_end = '1_end' then holiday_ending_sunday when holiday_start_or_end = 'partition_end' then schedule_ending_sunday else schedule_ending_sunday - end as valid_until, - - valid_from_index, - max_valid_from_index, - holiday_start_or_end + end as valid_until from add_end_row - where holiday_start_or_end != '0_start' or holiday_start_or_end is null - {# where not (valid_from_index > 1 and holiday_start_or_end = '0_start') #} -{# ), final as( +), filter_dupes as( select schedule_id, - valid_from, - valid_until, + time_zone, start_time_utc, end_time_utc, - holiday_name - from adjust_ranges #} + schedule_name, + holiday_name, + holiday_date, + valid_from, + valid_until, + case when holiday_start_or_end = '1_end' then true + end as is_holiday_week + from adjust_ranges + where not (valid_from = valid_until and holiday_date is not null) ) select * -from adjust_ranges -{# where holiday_start_or_end != '0_start' or holiday_start_or_end is null #} \ No newline at end of file +from filter_dupes From 8b32c455644a590e00f10a3cf298bc68502eeca6 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Thu, 26 Sep 2024 17:23:40 -0700 Subject: [PATCH 21/76] try out in buildkite --- .../history/int_zendesk__schedule_history.sql | 70 +++++++++++++++---- 1 file changed, 58 insertions(+), 12 deletions(-) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index a8178ed7..a7f0f20d 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -3,7 +3,7 @@ with schedule as ( select * from {{ var('schedule') }} -,) audit_logs as ( +), audit_logs as ( select _fivetran_synced, source_id as schedule_id, @@ -64,42 +64,88 @@ with 
schedule as (
     {% if not loop.last %}union all{% endif %}
     {% endfor %}
 
+{% if target.type == 'redshift' %}
+-- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE
+), redshift_parse_schedule as (
+    -- Redshift requires another CTE for unnesting
+    select
+        _fivetran_synced,
+        schedule_id,
+        created_at,
+        min_created_at,
+        change_description,
+        change_description_cleaned,
+        valid_from,
+        valid_until,
+        schedule_change,
+        change_type,
+        day_of_week,
+        day_of_week_number,
+        day_of_week_schedule,
+        json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule
+
+    from split_days
+
+), unnested_schedules as (
+    select
+        _fivetran_synced,
+        schedule_id,
+        created_at,
+        min_created_at,
+        change_description,
+        change_description_cleaned,
+        valid_from,
+        valid_until,
+        schedule_change,
+        change_type,
+        day_of_week,
+        day_of_week_number,
+        -- go back to strings
+        cast(day_of_week_schedule as {{ dbt.type_string() }}) as day_of_week_schedule,
+        {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule
+
+    from redshift_parse_schedule as schedules, schedules.json_schedule as unnested_schedule
+
+{% else %}
 ), unnested_schedules as (
 
     select
         split_days.*,
 
-{%- if target.type == 'bigquery' %}
+    {%- if target.type == 'bigquery' %}
         {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule
     from split_days
     cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule
 
-{%- elif target.type == 'snowflake' %}
+    {%- elif target.type == 'snowflake' %}
         unnested_schedule.key || ':' || unnested_schedule.value as cleaned_unnested_schedule
     from split_days
     cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{'))) as unnested_schedule
 
-{%- elif target.type == 'postgres' %}
+    {%- elif target.type == 'postgres' %}
         {{ clean_schedule('unnested_schedule::text') }} as cleaned_unnested_schedule
     from split_days
     cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule
 
-{%- elif target.type in ('databricks', 'spark') %}
+    {%- elif target.type in ('databricks', 'spark') %}
         {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule
     from split_days
     lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array<string>')) as unnested_schedule
 
-{%- elif target.type == 'redshift' %}
-    {# json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule
-    from split_days #}
-    {# cross join lateral json_parse(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{')) as element #}
+    {# {%- elif target.type == 'redshift' %}
+
+    json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule
+    from split_days
+    cross join lateral json_parse(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{')) as element
 
         cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule
-    from split_days
+    from split_days #}
 
-{% else %}
+    {% else %}
         cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule
     from split_days
-{%- endif %}
+    {%- endif %}
+
+{% endif %}
 
 ), split_times as (
 
From 9c493998ee06adfae7d0dbf5396c2f42c75b2d6f Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Thu, 26 Sep 2024 22:29:34 -0500
Subject: [PATCH 22/76] remove schedule 
days during holiday --- .../history/int_zendesk__schedule_history.sql | 2 +- .../int_zendesk__schedule_spine.sql | 75 ++++++++++++++++--- 2 files changed, 67 insertions(+), 10 deletions(-) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index ca5a116b..d21a0bf6 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -3,7 +3,7 @@ with schedule as ( select * from {{ var('schedule') }} -,) audit_logs as ( +), audit_logs as ( select _fivetran_synced, source_id as schedule_id, diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 612b6879..471c07c0 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -30,16 +30,16 @@ with schedule as ( select holiday._fivetran_synced, + holiday.holiday_id, + holiday.holiday_name, + holiday.schedule_id, cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, -- The valid_until will then be the the day after. cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, cast({{ dbt.date_trunc("week", "holiday.holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, cast({{ dbt.dateadd("week", 1, dbt.date_trunc( "week", "holiday.holiday_end_date_at") - ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, - holiday.holiday_id, - holiday.holiday_name, - holiday.schedule_id + ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday from holiday inner join calendar_spine @@ -57,7 +57,7 @@ with schedule as ( schedule.schedule_name, schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add, + coalesce(split_timezones.offset_minutes, 0) as offset_minutes, -- we'll use these to determine which schedule version to associate tickets with cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as valid_from, cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as valid_until @@ -70,6 +70,7 @@ with schedule as ( select calculate_schedules.schedule_id, calculate_schedules.time_zone, + calculate_schedules.offset_minutes, calculate_schedules.start_time_utc, calculate_schedules.end_time_utc, calculate_schedules.schedule_name, @@ -136,6 +137,7 @@ with schedule as ( select schedule_id, time_zone, + offset_minutes, start_time_utc, end_time_utc, schedule_name, @@ -164,6 +166,7 @@ with schedule as ( select schedule_id, time_zone, + offset_minutes, start_time_utc, end_time_utc, schedule_name, @@ -190,6 +193,7 @@ with schedule as ( select schedule_id, time_zone, + offset_minutes, start_time_utc, end_time_utc, schedule_name, @@ -231,23 +235,76 @@ with schedule as ( end as valid_until from add_end_row -), filter_dupes as( +), holiday_weeks as( select schedule_id, time_zone, + offset_minutes, start_time_utc, end_time_utc, schedule_name, holiday_name, - holiday_date, + holiday_valid_from, + holiday_valid_until, valid_from, valid_until, case when holiday_start_or_end = '1_end' then true end as is_holiday_week from adjust_ranges where not (valid_from = valid_until and holiday_date is not null) - + +), valid_minutes as( + select + holiday_weeks.*, + -- 
Calculate holiday_valid_from in minutes from Sunday + case when is_holiday_week then ( + {% if target.type in ('bigquery', 'databricks') %} + -- BigQuery and Databricks use DAYOFWEEK where Sunday = 1, so subtract 1 to make Sunday = 0 + ((extract(dayofweek from holiday_valid_from) - 1) * 24 * 60) + {% else %} + -- Snowflake and Postgres use DOW where Sunday = 0 + (extract(dow from holiday_valid_from) * 24 * 60) + {% endif %} + + extract(hour from holiday_valid_from) * 60 -- Get hours and convert to minutes + + extract(minute from holiday_valid_from) -- Get minutes + - offset_minutes -- Timezone adjustment + ) + else null end as holiday_valid_from_minutes_from_sunday, + + -- Calculate holiday_valid_until in minutes from Sunday + case when is_holiday_week then ( + ( + {% if target.type in ('bigquery', 'databricks') %} + (extract(dayofweek from holiday_valid_until) - 1) + {% else %} + (extract(dow from holiday_valid_until)) + {% endif %} + + 1) * 24 * 60 -- add 1 day to set the upper bound of the holiday + + extract(hour from holiday_valid_until) * 60 + + extract(minute from holiday_valid_until) + - offset_minutes + ) + else null end as holiday_valid_until_minutes_from_sunday + from holiday_weeks + +), remove_holiday_schedule as( + select + valid_minutes.* + from valid_minutes + where not (start_time_utc < holiday_valid_until_minutes_from_sunday + and end_time_utc > holiday_valid_from_minutes_from_sunday) + or is_holiday_week is null + +), final as( + select + schedule_id, + valid_from, + valid_until, + start_time_utc, + end_time_utc, + is_holiday_week + from remove_holiday_schedule ) select * -from filter_dupes +from final From e4bf09f708124847465d0dcc4bb369d61306568e Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:04:18 -0500 Subject: [PATCH 23/76] revise --- models/intermediate/int_zendesk__schedule_spine.sql | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 471c07c0..66f6fec7 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -53,7 +53,6 @@ with schedule as ( lower(schedule.time_zone) as time_zone, schedule.start_time, schedule.end_time, - {# schedule.created_at, #} schedule.schedule_name, schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, @@ -251,7 +250,7 @@ with schedule as ( case when holiday_start_or_end = '1_end' then true end as is_holiday_week from adjust_ranges - where not (valid_from = valid_until and holiday_date is not null) + where not (valid_from >= valid_until and holiday_date is not null) ), valid_minutes as( select From 4523d2c5b2a81613d5da3d0df5a4c042ea6ca47d Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:05:07 -0500 Subject: [PATCH 24/76] revise --- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 66f6fec7..2a29dfc3 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -110,7 +110,7 @@ with schedule as ( then '1_end' end as holiday_start_or_end, 
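        -- Illustrative sketch of the split above (toy values, not from the seed data): a one-day
        -- holiday on Wed 2024-11-27 inside a schedule window yields a '0_start' row that closes
        -- the window at the holiday and a '1_end' row that reopens it afterward; the '0_'/'1_'
        -- prefixes make the boundary rows sort deterministically when the partitions are indexed.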
holiday_date as valid_from, - schedule_valid_until as valid_until, + schedule_valid_until as valid_until from join_holidays where holiday_date is not null From cceae3f8dd8cf696aa8432cec01bf8217e96d6f7 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:06:10 -0500 Subject: [PATCH 25/76] add config --- models/history/int_zendesk__schedule_history.sql | 2 ++ 1 file changed, 2 insertions(+) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index d21a0bf6..b7dd85f5 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -1,3 +1,5 @@ +{{ config(enabled=var('using_schedules', True)) }} + with schedule as ( select * From 62c5fa946f695902be90f3c5f65294caa2679dcf Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:11:23 -0500 Subject: [PATCH 26/76] add config --- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- models/utils/int_zendesk__timezone_daylight.sql | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 2a29dfc3..d62f0bee 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -34,7 +34,7 @@ with schedule as ( holiday.holiday_name, holiday.schedule_id, cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, - cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, -- The valid_until will then be the the day after. 
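-- A note on the config(enabled=var('using_schedules', True)) headers introduced in the patches
-- above: dbt resolves the variable at parse time, so a consuming project can switch this model
-- lineage off from its own dbt_project.yml. A minimal sketch, assuming the variable name shown
-- in these patches:
--   vars:
--     using_schedules: false
-- With the variable set to false, dbt skips these models when compiling the project.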
+ cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, cast({{ dbt.date_trunc("week", "holiday.holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, cast({{ dbt.dateadd("week", 1, dbt.date_trunc( diff --git a/models/utils/int_zendesk__timezone_daylight.sql b/models/utils/int_zendesk__timezone_daylight.sql index f3974c1a..ee3c5a42 100644 --- a/models/utils/int_zendesk__timezone_daylight.sql +++ b/models/utils/int_zendesk__timezone_daylight.sql @@ -1,3 +1,5 @@ +{{ config(enabled=var('using_schedules', True)) }} + with timezone as ( select * From 1db42a79b10e663675f7a83e127b8047a12cd496 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 26 Sep 2024 23:23:52 -0500 Subject: [PATCH 27/76] allow disable holidays --- .../int_zendesk__schedule_spine.sql | 36 +++++++++++++------ 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index d62f0bee..ea0afb9a 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -10,11 +10,6 @@ with schedule as ( select * from {{ var('schedule') }} -), holiday as ( - - select * - from {{ var('schedule_holiday') }} - ), calendar_spine as ( select * @@ -25,6 +20,12 @@ with schedule as ( select * from {{ ref('int_zendesk__timezone_daylight') }} +{% if var('using_holidays', True) %} +), holiday as ( + + select * + from {{ var('schedule_holiday') }} + -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. 
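-- Illustrative walk-through of the explode (toy values, not from the seed data): a holiday row
-- spanning 2024-12-24 through 2024-12-26 matches three calendar_spine rows under the inner join
-- below, producing one holiday_date per day. Day-level rows keep the downstream join back to
-- schedules one-to-one per date instead of fanning out on overlapping ranges.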
), schedule_holiday as ( @@ -45,6 +46,7 @@ with schedule as ( inner join calendar_spine on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} ) and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} ) +{% endif %} ), calculate_schedules as ( @@ -73,21 +75,35 @@ with schedule as ( calculate_schedules.start_time_utc, calculate_schedules.end_time_utc, calculate_schedules.schedule_name, + calculate_schedules.valid_from as schedule_valid_from, + calculate_schedules.valid_until as schedule_valid_until, + cast({{ dbt.date_trunc("week", "calculate_schedules.valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt.date_trunc("week", "calculate_schedules.valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, + + {% if var('using_holidays', True) %} schedule_holiday.holiday_date, schedule_holiday.holiday_name, schedule_holiday.holiday_valid_from, schedule_holiday.holiday_valid_until, schedule_holiday.holiday_starting_sunday, - schedule_holiday.holiday_ending_sunday, - calculate_schedules.valid_from as schedule_valid_from, - calculate_schedules.valid_until as schedule_valid_until, - cast({{ dbt.date_trunc("week", "calculate_schedules.valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt.date_trunc("week", "calculate_schedules.valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday + schedule_holiday.holiday_ending_sunday + {% else %} + cast(null as {{ dbt.type_timestamp() }}) as holiday_date, + cast(null as {{ dbt.type_string() }}) as holiday_name, + cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_from, + cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast(null as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast(null as {{ dbt.type_timestamp() }}) as holiday_ending_sunday + {% endif %} + from calculate_schedules + + {% if var('using_holidays', True) %} left join schedule_holiday on schedule_holiday.schedule_id = calculate_schedules.schedule_id and schedule_holiday.holiday_date <= calculate_schedules.valid_until and schedule_holiday.holiday_date >= calculate_schedules.valid_from + {% endif %} ), split_holidays as( select From f73090847ec6f8f6377fb037b8fd940aa9a5f625 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Fri, 27 Sep 2024 09:08:47 -0700 Subject: [PATCH 28/76] streamline bk run --- .buildkite/scripts/run_models.sh | 6 +++--- models/history/int_zendesk__schedule_history.sql | 2 ++ 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index 5192f94a..876bafee 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -17,9 +17,9 @@ echo `pwd` cd integration_tests dbt deps dbt seed --target "$db" --full-refresh -dbt run --target "$db" --full-refresh -dbt test --target "$db" -dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh +dbt run -m +int_zendesk__schedule_history --target "$db" --full-refresh dbt test --target "$db" +# dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh +# dbt test --target "$db" # dbt 
run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index a7f0f20d..b05462a3 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -1,3 +1,5 @@ +{{ config(enabled=var('using_schedules', True)) }} + with schedule as ( select * From ee996160867783586ca6dd24daaf5b67c763fdde Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:17:54 -0500 Subject: [PATCH 29/76] adjust for multiple holidays in a week --- .../int_zendesk__schedule_spine.sql | 75 ++++++++++++++----- 1 file changed, 58 insertions(+), 17 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index ea0afb9a..b43f374b 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -31,7 +31,7 @@ with schedule as ( select holiday._fivetran_synced, - holiday.holiday_id, + cast(holiday.holiday_id as {{ dbt.type_string*() }}) as holiday_id, holiday.holiday_name, holiday.schedule_id, cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, @@ -60,8 +60,8 @@ with schedule as ( schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, coalesce(split_timezones.offset_minutes, 0) as offset_minutes, -- we'll use these to determine which schedule version to associate tickets with - cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as valid_from, - cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as valid_until + cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from, + cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until from schedule left join split_timezones @@ -75,13 +75,14 @@ with schedule as ( calculate_schedules.start_time_utc, calculate_schedules.end_time_utc, calculate_schedules.schedule_name, - calculate_schedules.valid_from as schedule_valid_from, - calculate_schedules.valid_until as schedule_valid_until, + calculate_schedules.schedule_valid_from, + calculate_schedules.schedule_valid_until, cast({{ dbt.date_trunc("week", "calculate_schedules.valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, cast({{ dbt.date_trunc("week", "calculate_schedules.valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, {% if var('using_holidays', True) %} schedule_holiday.holiday_date, + schedule_holiday.holiday_id, schedule_holiday.holiday_name, schedule_holiday.holiday_valid_from, schedule_holiday.holiday_valid_until, @@ -89,6 +90,7 @@ with schedule as ( schedule_holiday.holiday_ending_sunday {% else %} cast(null as {{ dbt.type_timestamp() }}) as holiday_date, + cast(null as {{ dbt.type_string() }}) as holiday_id, cast(null as {{ dbt.type_string() }}) as holiday_name, cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_from, cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_until, @@ -106,11 +108,12 @@ with schedule as ( {% endif %} ), split_holidays as( + -- create records for the first day of the holiday select join_holidays.*, case when holiday_valid_from = holiday_date - then '0_start' + then '0_start' -- the number is for ordering later end as holiday_start_or_end, schedule_valid_from as valid_from, holiday_date as 
valid_until @@ -119,11 +122,12 @@ with schedule as ( union all + -- create records for the last day of the holiday select join_holidays.*, case when holiday_valid_until = holiday_date - then '1_end' + then '1_end' -- the number is for ordering later end as holiday_start_or_end, holiday_date as valid_from, schedule_valid_until as valid_until @@ -132,6 +136,7 @@ with schedule as ( union all + -- keep records for weeks with no holiday select join_holidays.*, cast(null as {{ dbt.type_string() }}) as holiday_start_or_end, @@ -142,13 +147,13 @@ with schedule as ( ), valid_from_partition as( select - split_holidays.* - , row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index - , count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index + split_holidays.*, + row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index, + count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index from split_holidays where not (holiday_date is not null and holiday_start_or_end is null) -), add_end_row as( +), add_partition_end_row as( select schedule_id, time_zone, @@ -160,6 +165,7 @@ with schedule as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, + holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -178,6 +184,7 @@ with schedule as ( union all + -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition. select schedule_id, time_zone, @@ -189,6 +196,7 @@ with schedule as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, + holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -212,6 +220,7 @@ with schedule as ( start_time_utc, end_time_utc, schedule_name, + holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -248,7 +257,7 @@ with schedule as ( then schedule_ending_sunday else schedule_ending_sunday end as valid_until - from add_end_row + from add_partition_end_row ), holiday_weeks as( select @@ -258,6 +267,7 @@ with schedule as ( start_time_utc, end_time_utc, schedule_name, + holiday_id, holiday_name, holiday_valid_from, holiday_valid_until, @@ -302,13 +312,40 @@ with schedule as ( else null end as holiday_valid_until_minutes_from_sunday from holiday_weeks -), remove_holiday_schedule as( +), find_holidays as( select - valid_minutes.* + schedule_id, + valid_from, + valid_until, + start_time_utc, + end_time_utc, + holiday_id, + case + when start_time_utc < holiday_valid_until_minutes_from_sunday + and end_time_utc > holiday_valid_from_minutes_from_sunday + and is_holiday_week is not null + then holiday_name + else cast(null as {{ dbt.type_string() }}) + end as holiday_name, + is_holiday_week, + count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holiday_ids_in_week from valid_minutes - where not (start_time_utc < holiday_valid_until_minutes_from_sunday - and end_time_utc > holiday_valid_from_minutes_from_sunday) - or is_holiday_week is null + +), filter_holidays as( + select + *, + 1 as number_records_for_schedule_start_end + from find_holidays + where number_holiday_ids_in_week = 1 + + union all + + -- we want to count the number of records for each schedule start_time_utc and end_time_utc for comparison later + select + distinct *, + count(*) over (partition by schedule_id, valid_from, 
valid_until, start_time_utc, end_time_utc, holiday_id) as number_records_for_schedule_start_end + from find_holidays + where number_holiday_ids_in_week > 1 ), final as( select @@ -319,6 +356,10 @@ with schedule as ( end_time_utc, is_holiday_week from remove_holiday_schedule + -- This filter is for multiple holiday ids in 1 week. We want to check for each schedule start_time_utc and end_time_utc + -- that the holiday_id count matches the number of distinct records. + -- When rows that don't match, that indicates there is a holiday on that day, and we'll filter them out. + where number_holiday_ids_in_week = number_records_for_schedule_start_end ) select * From 4f395dec0b593733fa05beeb4c8bec7defecfe15 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:26:59 -0500 Subject: [PATCH 30/76] add casting --- models/intermediate/int_zendesk__schedule_spine.sql | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index b43f374b..cf4a21fd 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -334,7 +334,7 @@ with schedule as ( ), filter_holidays as( select *, - 1 as number_records_for_schedule_start_end + cast(1 as {{ dbt.type_int() }}) as number_records_for_schedule_start_end from find_holidays where number_holiday_ids_in_week = 1 @@ -343,7 +343,8 @@ with schedule as ( -- we want to count the number of records for each schedule start_time_utc and end_time_utc for comparison later select distinct *, - count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_id) as number_records_for_schedule_start_end + cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_id) + as {{ dbt.type_int() }}) as number_records_for_schedule_start_end from find_holidays where number_holiday_ids_in_week > 1 From 8613f8a8270b1907bd8241ce0b0bf816e1017128 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:34:16 -0500 Subject: [PATCH 31/76] add casting --- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index cf4a21fd..37007176 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -31,7 +31,7 @@ with schedule as ( select holiday._fivetran_synced, - cast(holiday.holiday_id as {{ dbt.type_string*() }}) as holiday_id, + cast(holiday.holiday_id as {{ dbt.type_string() }}) as holiday_id, holiday.holiday_name, holiday.schedule_id, cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, From f762ca170a0581cd1193a370b72cf0719804b9ce Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:42:31 -0500 Subject: [PATCH 32/76] fixes --- models/intermediate/int_zendesk__schedule_spine.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 37007176..ed001762 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ 
b/models/intermediate/int_zendesk__schedule_spine.sql @@ -361,6 +361,7 @@ with schedule as ( -- that the holiday_id count matches the number of distinct records. -- When rows that don't match, that indicates there is a holiday on that day, and we'll filter them out. where number_holiday_ids_in_week = number_records_for_schedule_start_end + and holiday_name is null -- this will remove schedules that fall on a holiday ) select * From 00a98cb7f151c076991031ad3d7e3b89b19210c0 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:44:01 -0500 Subject: [PATCH 33/76] fixes --- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index ed001762..4fde04b6 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -356,7 +356,7 @@ with schedule as ( start_time_utc, end_time_utc, is_holiday_week - from remove_holiday_schedule + from filter_holidays -- This filter is for multiple holiday ids in 1 week. We want to check for each schedule start_time_utc and end_time_utc -- that the holiday_id count matches the number of distinct records. -- When rows that don't match, that indicates there is a holiday on that day, and we'll filter them out. From 96be5ae13e10560048ad09030bca56a3618be4e1 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:45:51 -0500 Subject: [PATCH 34/76] fixes --- models/intermediate/int_zendesk__schedule_spine.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 4fde04b6..b70fa472 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -77,8 +77,8 @@ with schedule as ( calculate_schedules.schedule_name, calculate_schedules.schedule_valid_from, calculate_schedules.schedule_valid_until, - cast({{ dbt.date_trunc("week", "calculate_schedules.valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt.date_trunc("week", "calculate_schedules.valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, + cast({{ dbt.date_trunc("week", "calculate_schedules.schedule_valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt.date_trunc("week", "calculate_schedules.schedule_valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, {% if var('using_holidays', True) %} schedule_holiday.holiday_date, From 751705793c4901b297906f72e12a955c4ae52354 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 18:56:48 -0500 Subject: [PATCH 35/76] fixes --- models/intermediate/int_zendesk__schedule_spine.sql | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index b70fa472..10c4bd7c 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -103,8 +103,8 @@ with schedule as ( {% if var('using_holidays', True) %} left join schedule_holiday on schedule_holiday.schedule_id = 
calculate_schedules.schedule_id - and schedule_holiday.holiday_date <= calculate_schedules.valid_until - and schedule_holiday.holiday_date >= calculate_schedules.valid_from + and schedule_holiday.holiday_date <= calculate_schedules.schedule_valid_until + and schedule_holiday.holiday_date >= calculate_schedules.schedule_valid_from {% endif %} ), split_holidays as( From 184e639039d9c605dbcee3832b6ae83e27752983 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 19:29:28 -0500 Subject: [PATCH 36/76] fixes --- .../int_zendesk__schedule_spine.sql | 23 +++++++------------ 1 file changed, 8 insertions(+), 15 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 10c4bd7c..891a0f88 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -31,7 +31,6 @@ with schedule as ( select holiday._fivetran_synced, - cast(holiday.holiday_id as {{ dbt.type_string() }}) as holiday_id, holiday.holiday_name, holiday.schedule_id, cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, @@ -82,7 +81,6 @@ with schedule as ( {% if var('using_holidays', True) %} schedule_holiday.holiday_date, - schedule_holiday.holiday_id, schedule_holiday.holiday_name, schedule_holiday.holiday_valid_from, schedule_holiday.holiday_valid_until, @@ -90,7 +88,6 @@ with schedule as ( schedule_holiday.holiday_ending_sunday {% else %} cast(null as {{ dbt.type_timestamp() }}) as holiday_date, - cast(null as {{ dbt.type_string() }}) as holiday_id, cast(null as {{ dbt.type_string() }}) as holiday_name, cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_from, cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_until, @@ -165,7 +162,6 @@ with schedule as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, - holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -196,7 +192,6 @@ with schedule as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, - holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -220,7 +215,6 @@ with schedule as ( start_time_utc, end_time_utc, schedule_name, - holiday_id, holiday_name, holiday_date, holiday_valid_from, @@ -267,13 +261,13 @@ with schedule as ( start_time_utc, end_time_utc, schedule_name, - holiday_id, holiday_name, holiday_valid_from, holiday_valid_until, valid_from, valid_until, case when holiday_start_or_end = '1_end' then true + else false end as is_holiday_week from adjust_ranges where not (valid_from >= valid_until and holiday_date is not null) @@ -319,16 +313,15 @@ with schedule as ( valid_until, start_time_utc, end_time_utc, - holiday_id, case when start_time_utc < holiday_valid_until_minutes_from_sunday and end_time_utc > holiday_valid_from_minutes_from_sunday - and is_holiday_week is not null + and is_holiday_week then holiday_name else cast(null as {{ dbt.type_string() }}) end as holiday_name, is_holiday_week, - count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holiday_ids_in_week + count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week from valid_minutes ), filter_holidays as( @@ -336,17 +329,17 @@ with schedule as ( *, cast(1 as {{ dbt.type_int() }}) as number_records_for_schedule_start_end from find_holidays - where number_holiday_ids_in_week = 1 
+ where number_holidays_in_week = 1 union all -- we want to count the number of records for each schedule start_time_utc and end_time_utc for comparison later select distinct *, - cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_id) + cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) as {{ dbt.type_int() }}) as number_records_for_schedule_start_end from find_holidays - where number_holiday_ids_in_week > 1 + where number_holidays_in_week > 1 ), final as( select @@ -358,9 +351,9 @@ with schedule as ( is_holiday_week from filter_holidays -- This filter is for multiple holiday ids in 1 week. We want to check for each schedule start_time_utc and end_time_utc - -- that the holiday_id count matches the number of distinct records. + -- that the holiday count matches the number of distinct records. -- When rows that don't match, that indicates there is a holiday on that day, and we'll filter them out. - where number_holiday_ids_in_week = number_records_for_schedule_start_end + where number_holidays_in_week = number_records_for_schedule_start_end and holiday_name is null -- this will remove schedules that fall on a holiday ) From c594f3f8397aa41614ca22df16e738f718da6345 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 27 Sep 2024 23:14:21 -0500 Subject: [PATCH 37/76] fix multiyear schedules --- .../int_zendesk__schedule_spine.sql | 94 +++++++++++++++---- 1 file changed, 78 insertions(+), 16 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 891a0f88..26161485 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -12,7 +12,8 @@ with schedule as ( ), calendar_spine as ( - select * + select + cast(date_day as {{ dbt.type_timestamp() }} ) as date_day from {{ ref('int_zendesk__calendar_spine') }} ), split_timezones as ( @@ -23,28 +24,89 @@ with schedule as ( {% if var('using_holidays', True) %} ), holiday as ( - select * + select + _fivetran_synced, + holiday_name, + schedule_id, + cast(holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, + cast(holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast({{ dbt.date_trunc("week", "holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.dateadd("week", 1, dbt.date_trunc( + "week", "holiday_end_date_at") + ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday from {{ var('schedule_holiday') }} +), holiday_multiple_weeks_check as ( + + select + holiday.*, + -- calculate weeks the holiday range spans. Takes into account if the holiday extends into the next year. 
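    -- Worked example of the arithmetic below (dates hypothetical): extract(week) alone resets at
    -- the year boundary, so a holiday running 2024-12-28 through 2025-01-03 would subtract a late
    -- week number from an early one and go negative. Adding year * 52 linearizes the index,
    --   (week_end + year_end * 52) - (week_start + year_start * 52),
    -- so a holiday crossing into the new year still yields a small positive week span.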
+ (extract(week from holiday_ending_sunday) + extract(year from holiday_ending_sunday) * 52) + - (extract(week from holiday_starting_sunday) + extract(year from holiday_starting_sunday) * 52) + as holiday_weeks_spanned + from holiday + +), split_multiweek_holidays as ( + + select + _fivetran_synced, + holiday_name, + schedule_id, + holiday_valid_from, + holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, + holiday_weeks_spanned + from holiday_multiple_weeks_check + where holiday_weeks_spanned = 1 + + union all + + -- Split holidays that span a weekend + select + _fivetran_synced, + holiday_name, + schedule_id, + holiday_valid_from, + cast({{ dbt.last_day('holiday_valid_from', 'week') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, + holiday_starting_sunday, + cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, + holiday_weeks_spanned + from holiday_multiple_weeks_check + where holiday_weeks_spanned > 1 + + union all + + -- Split holidays that span a weekend + select + _fivetran_synced, + holiday_name, + schedule_id, + cast({{ dbt.date_trunc('week', 'holiday_valid_until') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, + holiday_valid_until, + cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + holiday_ending_sunday, + holiday_weeks_spanned + from holiday_multiple_weeks_check + where holiday_weeks_spanned > 1 + -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. ), schedule_holiday as ( select - holiday._fivetran_synced, - holiday.holiday_name, - holiday.schedule_id, - cast(holiday.holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, - cast(holiday.holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast(calendar_spine.date_day as {{ dbt.type_timestamp() }} ) as holiday_date, - cast({{ dbt.date_trunc("week", "holiday.holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt.dateadd("week", 1, dbt.date_trunc( - "week", "holiday.holiday_end_date_at") - ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday - - from holiday + split_multiweek_holidays._fivetran_synced, + split_multiweek_holidays.holiday_name, + split_multiweek_holidays.schedule_id, + split_multiweek_holidays.holiday_valid_from, + split_multiweek_holidays.holiday_valid_until, + split_multiweek_holidays.holiday_starting_sunday, + split_multiweek_holidays.holiday_ending_sunday, + calendar_spine.date_day as holiday_date + from split_multiweek_holidays inner join calendar_spine - on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} ) - and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} ) + on holiday_valid_from <= date_day + and holiday_valid_until >= date_day + {% endif %} ), calculate_schedules as ( From e6134803ec2f2060b3e8c0b004c6d4e465d5153a Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Mon, 30 Sep 2024 10:56:52 -0500 Subject: [PATCH 38/76] add longer holiday support --- .../int_zendesk__schedule_spine.sql | 19 +++++++++++++++++-- packages.yml | 2 +- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 26161485..87dc15f7 100644 --- 
a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -62,7 +62,7 @@ with schedule as ( union all - -- Split holidays that span a weekend + -- Split holidays that span a weekend. This is for the first half. select _fivetran_synced, holiday_name, @@ -77,7 +77,7 @@ with schedule as ( union all - -- Split holidays that span a weekend + -- Split holidays that span a weekend. This is for the last half. select _fivetran_synced, holiday_name, @@ -90,6 +90,21 @@ with schedule as ( from holiday_multiple_weeks_check where holiday_weeks_spanned > 1 + union all + + -- Fill holidays that span more than two weeks. This will fill entire weeks for those sandwiched between the ends. + select + _fivetran_synced, + holiday_name, + schedule_id, + cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, + cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, + holiday_weeks_spanned + from holiday_multiple_weeks_check + where holiday_weeks_spanned > 2 + -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. ), schedule_holiday as ( diff --git a/packages.yml b/packages.yml index 85aac10f..efe428a1 100644 --- a/packages.yml +++ b/packages.yml @@ -2,7 +2,7 @@ packages: # - package: fivetran/zendesk_source # version: [">=0.12.0", "<0.13.0"] - git: https://github.com/fivetran/dbt_zendesk_source.git - revision: explore/audit-log-spike + revision: feature/historical-schedules warn-unpinned: false - package: calogica/dbt_date version: [">=0.9.0", "<1.0.0"] From a49f0d638dfe6a85835588880b3708a06be06fbe Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Tue, 1 Oct 2024 00:57:49 -0500 Subject: [PATCH 39/76] revert multiweek --- models/intermediate/int_zendesk__schedule_spine.sql | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 87dc15f7..e9754c42 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -33,7 +33,7 @@ with schedule as ( cast({{ dbt.date_trunc("week", "holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, cast({{ dbt.dateadd("week", 1, dbt.date_trunc( "week", "holiday_end_date_at") - ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday + ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday -- the next sunday after the holiday ends from {{ var('schedule_holiday') }} ), holiday_multiple_weeks_check as ( @@ -90,7 +90,7 @@ with schedule as ( from holiday_multiple_weeks_check where holiday_weeks_spanned > 1 - union all + {# union all -- Fill holidays that span more than two weeks. This will fill entire weeks for those sandwiched between the ends. 
select @@ -98,12 +98,12 @@ with schedule as ( holiday_name, schedule_id, cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, - cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast({{ dbt.dateadd('week', -1, dbt.dateadd('day', -1, 'holiday_ending_sunday')) }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, holiday_weeks_spanned from holiday_multiple_weeks_check - where holiday_weeks_spanned > 2 + where holiday_weeks_spanned > 2 #} -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. ), schedule_holiday as ( From e1f9d32d8cfba2f89dabee65d382e03b83e238fa Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Tue, 1 Oct 2024 02:55:22 -0500 Subject: [PATCH 40/76] adjust multiweek --- .../int_zendesk__schedule_spine.sql | 126 +++++++++--------- models/utils/int_zendesk__calendar_spine.sql | 2 +- 2 files changed, 62 insertions(+), 66 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index e9754c42..f2e5c236 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -10,17 +10,36 @@ with schedule as ( select * from {{ var('schedule') }} +), split_timezones as ( + + select * + from {{ ref('int_zendesk__timezone_daylight') }} + +), calculate_schedules as ( + + select + schedule.schedule_id, + lower(schedule.time_zone) as time_zone, + schedule.start_time, + schedule.end_time, + schedule.schedule_name, + schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, + schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, + coalesce(split_timezones.offset_minutes, 0) as offset_minutes, + -- we'll use these to determine which schedule version to associate tickets with + cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until + + from schedule + left join split_timezones + on split_timezones.time_zone = lower(schedule.time_zone) + ), calendar_spine as ( select cast(date_day as {{ dbt.type_timestamp() }} ) as date_day from {{ ref('int_zendesk__calendar_spine') }} -), split_timezones as ( - - select * - from {{ ref('int_zendesk__timezone_daylight') }} - {% if var('using_holidays', True) %} ), holiday as ( @@ -30,9 +49,9 @@ with schedule as ( schedule_id, cast(holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, cast(holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast({{ dbt.date_trunc("week", "holiday_start_date_at") }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt.dateadd("week", 1, dbt.date_trunc( - "week", "holiday_end_date_at") + cast({{ dbt.date_trunc('week', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.dateadd('week', 1, dbt.date_trunc( + 'week', 'holiday_end_date_at') ) }} as 
{{ dbt.type_timestamp() }}) as holiday_ending_sunday -- the next sunday after the holiday ends from {{ var('schedule_holiday') }} @@ -46,8 +65,19 @@ with schedule as ( as holiday_weeks_spanned from holiday +), expanded_holidays as ( + select + holiday_multiple_weeks_check.*, + cast(numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number + from holiday_multiple_weeks_check + -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks + cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as numbers + where holiday_multiple_weeks_check.holiday_weeks_spanned > 1 + and numbers.generated_number <= holiday_multiple_weeks_check.holiday_weeks_spanned + ), split_multiweek_holidays as ( + -- Business as usual for holidays that fall within a single week. select _fivetran_synced, holiday_name, @@ -62,49 +92,35 @@ with schedule as ( union all - -- Split holidays that span a weekend. This is for the first half. + -- Split holidays by week that span multiple weeks. select _fivetran_synced, holiday_name, schedule_id, - holiday_valid_from, - cast({{ dbt.last_day('holiday_valid_from', 'week') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, - holiday_starting_sunday, - cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, - holiday_weeks_spanned - from holiday_multiple_weeks_check - where holiday_weeks_spanned > 1 - - union all - - -- Split holidays that span a weekend. This is for the last half. - select - _fivetran_synced, - holiday_name, - schedule_id, - cast({{ dbt.date_trunc('week', 'holiday_valid_until') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, - holiday_valid_until, - cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - holiday_ending_sunday, + case + when holiday_week_number = 1 -- first week in multiweek holiday + then holiday_valid_from + else cast({{ dbt.dateadd('week', 'holiday_week_number-1', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) + end as holiday_valid_from, + case + when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday + then holiday_valid_until + else cast({{ dbt.dateadd('day', -1, dbt.dateadd('week', 'holiday_week_number', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday + end as holiday_valid_until, + case + when holiday_week_number = 1 -- first week in multiweek holiday + then holiday_starting_sunday + else cast({{ dbt.date_trunc('week', dbt.dateadd('week', 'holiday_week_number-1', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) + end as holiday_starting_sunday, + case + when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday + then holiday_ending_sunday + else cast({{ dbt.date_trunc('week', dbt.dateadd('week', 'holiday_week_number', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday + end as holiday_ending_sunday, holiday_weeks_spanned - from holiday_multiple_weeks_check + from expanded_holidays where holiday_weeks_spanned > 1 - {# union all - - -- Fill holidays that span more than two weeks. This will fill entire weeks for those sandwiched between the ends. 
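    -- Illustrative note on the fanout above (toy values): dbt_utils.generate_series is documented
    -- as producing a 1-indexed series, so a holiday with holiday_weeks_spanned = 3 picks up
    -- generated_number values 1, 2 and 3 from the cross join, and the multiweek branch below
    -- rewrites them as three single-week holiday rows.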
- select - _fivetran_synced, - holiday_name, - schedule_id, - cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, - cast({{ dbt.dateadd('week', -1, dbt.dateadd('day', -1, 'holiday_ending_sunday')) }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast({{ dbt.dateadd('week', 1, 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt.dateadd('week', -1, 'holiday_ending_sunday') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, - holiday_weeks_spanned - from holiday_multiple_weeks_check - where holiday_weeks_spanned > 2 #} - -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. ), schedule_holiday as ( @@ -121,28 +137,8 @@ with schedule as ( inner join calendar_spine on holiday_valid_from <= date_day and holiday_valid_until >= date_day - {% endif %} -), calculate_schedules as ( - - select - schedule.schedule_id, - lower(schedule.time_zone) as time_zone, - schedule.start_time, - schedule.end_time, - schedule.schedule_name, - schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, - schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - coalesce(split_timezones.offset_minutes, 0) as offset_minutes, - -- we'll use these to determine which schedule version to associate tickets with - cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from, - cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until - - from schedule - left join split_timezones - on split_timezones.time_zone = lower(schedule.time_zone) - ), join_holidays as ( select calculate_schedules.schedule_id, @@ -177,8 +173,8 @@ with schedule as ( {% if var('using_holidays', True) %} left join schedule_holiday on schedule_holiday.schedule_id = calculate_schedules.schedule_id - and schedule_holiday.holiday_date <= calculate_schedules.schedule_valid_until and schedule_holiday.holiday_date >= calculate_schedules.schedule_valid_from + and schedule_holiday.holiday_date < calculate_schedules.schedule_valid_until {% endif %} ), split_holidays as( diff --git a/models/utils/int_zendesk__calendar_spine.sql b/models/utils/int_zendesk__calendar_spine.sql index 393b6a48..d5f4b444 100644 --- a/models/utils/int_zendesk__calendar_spine.sql +++ b/models/utils/int_zendesk__calendar_spine.sql @@ -28,7 +28,7 @@ with spine as ( dbt_utils.date_spine( datepart = "day", start_date = first_date_adjust, - end_date = dbt.dateadd("week", 1, "current_date") + end_date = dbt.dateadd("week", 52, "current_date") ) }} From 98e6872c42844c4d15a3df23c00fbf475b35aba9 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:20:54 -0500 Subject: [PATCH 41/76] account for non sunday week starts --- integration_tests/dbt_project.yml | 2 + macros/extract_dow.sql | 17 ++ .../history/int_zendesk__schedule_history.sql | 22 ++- .../int_zendesk__schedule_spine.sql | 155 ++++++++++-------- 4 files changed, 128 insertions(+), 68 deletions(-) create mode 100644 macros/extract_dow.sql diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 66c3d094..d17a6243 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -41,6 +41,8 @@ vars: models: +schema: "zendesk_{{ var('directed_schema','dev') }}" + zendesk_source: + 
+materialized: ephemeral seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" diff --git a/macros/extract_dow.sql b/macros/extract_dow.sql new file mode 100644 index 00000000..313d8bbb --- /dev/null +++ b/macros/extract_dow.sql @@ -0,0 +1,17 @@ +{% macro extract_dow(date_or_time) -%} + {{ return(adapter.dispatch('extract_dow', 'zendesk')(date_or_time)) }} +{%- endmacro %} + +-- Snowflake and Postgres use DOW where Sunday = 0 +{% macro default__extract_dow(date_or_time) %} + extract(dow from {{ date_or_time }}) +{% endmacro %} + +-- BigQuery and Databricks use DAYOFWEEK where Sunday = 1, so subtract 1 to make Sunday = 0 +{% macro bigquery__extract_dow(date_or_time) %} + (extract(dayofweek from {{ date_or_time }}) - 1) +{% endmacro %} + +{% macro spark__extract_dow(date_or_time) %} + (extract(dayofweek from {{ date_or_time }}) - 1) +{% endmacro %} \ No newline at end of file diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index b7dd85f5..523ab5e8 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -113,7 +113,7 @@ with schedule as ( cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm from unnested_schedules -), final as ( +), calculate_start_end_times as ( select _fivetran_synced, @@ -122,10 +122,28 @@ with schedule as ( end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, valid_from, valid_until, + cast({{ dbt.date_trunc('day', 'valid_from') }} as {{ dbt.type_timestamp() }}) as valid_from_day, + cast({{ dbt.date_trunc('day', 'valid_until') }} as {{ dbt.type_timestamp() }}) as valid_until_day, day_of_week, day_of_week_number from split_times + +), final as ( + select + schedule_id, + start_time, + end_time, + day_of_week, + day_of_week_number, + valid_from_day, + valid_until_day, + -- want to consolidate multiple user changes that don't result in a true schedule change. 
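    -- Illustrative sketch of the consolidation below (toy values): if a schedule is saved twice
    -- on 2024-03-01 without the Mon 09:00-17:00 block actually changing, both audit versions
    -- share the same grouped columns, and min(valid_from)/max(valid_until) collapse them into a
    -- single row spanning the earliest and latest change timestamps.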
+ min(valid_from) as valid_from, + max(valid_until) as valid_until + from calculate_start_end_times + {{ dbt_utils.group_by(7) }} + ) select * -from final \ No newline at end of file +from calculate_start_end_times \ No newline at end of file diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index f2e5c236..83d1afff 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -5,7 +5,13 @@ End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) */ -with schedule as ( +with calendar_spine as ( + + select + cast(date_day as {{ dbt.type_timestamp() }} ) as date_day + from {{ ref('int_zendesk__calendar_spine') }} + +), schedule as ( select * from {{ var('schedule') }} @@ -20,66 +26,93 @@ with schedule as ( select schedule.schedule_id, lower(schedule.time_zone) as time_zone, + coalesce(split_timezones.offset_minutes, 0) as offset_minutes, schedule.start_time, schedule.end_time, schedule.schedule_name, schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - coalesce(split_timezones.offset_minutes, 0) as offset_minutes, -- we'll use these to determine which schedule version to associate tickets with cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from, - cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until - + cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until, + cast({{ dbt.date_trunc('week', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_week_start_date, + cast({{ dbt.date_trunc('week', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_week_end_date, + -- check what dow warehouse truncates the week to + {{ zendesk.extract_dow(dbt.date_trunc('week', 'split_timezones.valid_from')) }} as week_start_dow from schedule left join split_timezones on split_timezones.time_zone = lower(schedule.time_zone) -), calendar_spine as ( - - select - cast(date_day as {{ dbt.type_timestamp() }} ) as date_day - from {{ ref('int_zendesk__calendar_spine') }} +), adjust_schedule_week_start as ( + select + schedule_id, + time_zone, + offset_minutes, + start_time_utc, + end_time_utc, + schedule_name, + schedule_valid_from, + schedule_valid_until, + -- This adjusts to Sunday when the warehouse is not truncating to Sunday. 
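+            -- e.g., Postgres truncates 2024-05-15 to Monday 2024-05-13 (week_start_dow = 1),
+            -- so subtracting 1 day moves the week start back to Sunday 2024-05-12.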
+ cast({{ dbt.dateadd('day', '-week_start_dow', 'schedule_week_start_date') }} + as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt.dateadd('day', '-week_start_dow', 'schedule_week_end_date') }} + as {{ dbt.type_timestamp() }}) as schedule_ending_sunday + from calculate_schedules {% if var('using_holidays', True) %} ), holiday as ( - select - _fivetran_synced, holiday_name, schedule_id, cast(holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, cast(holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast({{ dbt.date_trunc('week', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.date_trunc('week', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_week_start_date, cast({{ dbt.dateadd('week', 1, dbt.date_trunc( 'week', 'holiday_end_date_at') - ) }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday -- the next sunday after the holiday ends + ) }} as {{ dbt.type_timestamp() }}) as holiday_week_end_date, + -- check what dow warehouse truncates the week to + {{ zendesk.extract_dow(dbt.date_trunc('week', 'holiday_start_date_at')) }} as week_start_dow from {{ var('schedule_holiday') }} -), holiday_multiple_weeks_check as ( +), adjust_holiday_week_start as ( + select + holiday_name, + schedule_id, + holiday_valid_from, + holiday_valid_until, + -- adjusts to Sunday when the database is not truncating to Sunday + cast({{ dbt.dateadd('day', '-week_start_dow', 'holiday_week_start_date') }} + as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt.dateadd('day', '-week_start_dow', 'holiday_week_end_date') }} + as {{ dbt.type_timestamp() }}) as holiday_ending_sunday + from holiday +), holiday_multiple_weeks_check as ( + -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. + -- This first step is to find those holidays. select - holiday.*, + adjust_holiday_week_start.*, -- calculate weeks the holiday range spans. Takes into account if the holiday extends into the next year. - (extract(week from holiday_ending_sunday) + extract(year from holiday_ending_sunday) * 52) - - (extract(week from holiday_starting_sunday) + extract(year from holiday_starting_sunday) * 52) - as holiday_weeks_spanned - from holiday + (extract(week from holiday_valid_until) + extract(year from holiday_valid_until)) + - (extract(week from holiday_valid_from) + extract(year from holiday_valid_from)) + + 1 as holiday_weeks_spanned + from adjust_holiday_week_start ), expanded_holidays as ( select holiday_multiple_weeks_check.*, - cast(numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number + cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number from holiday_multiple_weeks_check -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks - cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as numbers + cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers where holiday_multiple_weeks_check.holiday_weeks_spanned > 1 - and numbers.generated_number <= holiday_multiple_weeks_check.holiday_weeks_spanned + and week_numbers.generated_number <= holiday_multiple_weeks_check.holiday_weeks_spanned ), split_multiweek_holidays as ( -- Business as usual for holidays that fall within a single week. 
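    -- (single-week holidays pass through with their original dates; spans longer than one week are rebuilt week by week in the union below)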
select - _fivetran_synced, holiday_name, schedule_id, holiday_valid_from, @@ -94,28 +127,28 @@ with schedule as ( -- Split holidays by week that span multiple weeks. select - _fivetran_synced, holiday_name, schedule_id, case when holiday_week_number = 1 -- first week in multiweek holiday then holiday_valid_from - else cast({{ dbt.dateadd('week', 'holiday_week_number-1', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) + -- we have to use days in case of a wonky week trunc. + else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_valid_from, case when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday then holiday_valid_until - else cast({{ dbt.dateadd('day', -1, dbt.dateadd('week', 'holiday_week_number', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday + else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday end as holiday_valid_until, case when holiday_week_number = 1 -- first week in multiweek holiday then holiday_starting_sunday - else cast({{ dbt.date_trunc('week', dbt.dateadd('week', 'holiday_week_number-1', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) + else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_starting_sunday, case when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday then holiday_ending_sunday - else cast({{ dbt.date_trunc('week', dbt.dateadd('week', 'holiday_week_number', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday + else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_ending_sunday, holiday_weeks_spanned from expanded_holidays @@ -125,7 +158,6 @@ with schedule as ( ), schedule_holiday as ( select - split_multiweek_holidays._fivetran_synced, split_multiweek_holidays.holiday_name, split_multiweek_holidays.schedule_id, split_multiweek_holidays.holiday_valid_from, @@ -141,16 +173,16 @@ with schedule as ( ), join_holidays as ( select - calculate_schedules.schedule_id, - calculate_schedules.time_zone, - calculate_schedules.offset_minutes, - calculate_schedules.start_time_utc, - calculate_schedules.end_time_utc, - calculate_schedules.schedule_name, - calculate_schedules.schedule_valid_from, - calculate_schedules.schedule_valid_until, - cast({{ dbt.date_trunc("week", "calculate_schedules.schedule_valid_from") }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt.date_trunc("week", "calculate_schedules.schedule_valid_until") }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, + adjust_schedule_week_start.schedule_id, + adjust_schedule_week_start.time_zone, + adjust_schedule_week_start.offset_minutes, + adjust_schedule_week_start.start_time_utc, + adjust_schedule_week_start.end_time_utc, + adjust_schedule_week_start.schedule_name, + adjust_schedule_week_start.schedule_valid_from, + adjust_schedule_week_start.schedule_valid_until, + adjust_schedule_week_start.schedule_starting_sunday, + adjust_schedule_week_start.schedule_ending_sunday, {% if var('using_holidays', True) %} schedule_holiday.holiday_date, @@ -168,13 +200,13 @@ with schedule as ( cast(null as {{ dbt.type_timestamp() }}) as holiday_ending_sunday {% endif %} - from calculate_schedules + from 
adjust_schedule_week_start {% if var('using_holidays', True) %} left join schedule_holiday - on schedule_holiday.schedule_id = calculate_schedules.schedule_id - and schedule_holiday.holiday_date >= calculate_schedules.schedule_valid_from - and schedule_holiday.holiday_date < calculate_schedules.schedule_valid_until + on schedule_holiday.schedule_id = adjust_schedule_week_start.schedule_id + and schedule_holiday.holiday_date >= adjust_schedule_week_start.schedule_valid_from + and schedule_holiday.holiday_date < adjust_schedule_week_start.schedule_valid_until {% endif %} ), split_holidays as( @@ -337,6 +369,8 @@ with schedule as ( holiday_name, holiday_valid_from, holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, valid_from, valid_until, case when holiday_start_or_end = '1_end' then true @@ -348,35 +382,24 @@ with schedule as ( ), valid_minutes as( select holiday_weeks.*, - -- Calculate holiday_valid_from in minutes from Sunday + + -- Calculate holiday_valid_from in minutes from week start case when is_holiday_week then ( - {% if target.type in ('bigquery', 'databricks') %} - -- BigQuery and Databricks use DAYOFWEEK where Sunday = 1, so subtract 1 to make Sunday = 0 - ((extract(dayofweek from holiday_valid_from) - 1) * 24 * 60) - {% else %} - -- Snowflake and Postgres use DOW where Sunday = 0 - (extract(dow from holiday_valid_from) * 24 * 60) - {% endif %} + ({{ zendesk.extract_dow('holiday_valid_from') }} * 24 * 60) + extract(hour from holiday_valid_from) * 60 -- Get hours and convert to minutes + extract(minute from holiday_valid_from) -- Get minutes - offset_minutes -- Timezone adjustment - ) - else null end as holiday_valid_from_minutes_from_sunday, + ) else null + end as holiday_valid_from_minutes_from_week_start, - -- Calculate holiday_valid_until in minutes from Sunday + -- Calculate holiday_valid_until in minutes from week start case when is_holiday_week then ( - ( - {% if target.type in ('bigquery', 'databricks') %} - (extract(dayofweek from holiday_valid_until) - 1) - {% else %} - (extract(dow from holiday_valid_until)) - {% endif %} - + 1) * 24 * 60 -- add 1 day to set the upper bound of the holiday + (({{ zendesk.extract_dow('holiday_valid_until') }} + 1) * 24 * 60) -- add 1 day to set the upper bound of the holiday + extract(hour from holiday_valid_until) * 60 + extract(minute from holiday_valid_until) - offset_minutes - ) - else null end as holiday_valid_until_minutes_from_sunday + ) else null + end as holiday_valid_until_minutes_from_week_start from holiday_weeks ), find_holidays as( @@ -387,8 +410,8 @@ with schedule as ( start_time_utc, end_time_utc, case - when start_time_utc < holiday_valid_until_minutes_from_sunday - and end_time_utc > holiday_valid_from_minutes_from_sunday + when start_time_utc < holiday_valid_until_minutes_from_week_start + and end_time_utc > holiday_valid_from_minutes_from_week_start and is_holiday_week then holiday_name else cast(null as {{ dbt.type_string() }}) @@ -431,4 +454,4 @@ with schedule as ( ) select * -from final +from split_multiweek_holidays From 032158199b6b3c58f5022669cc443f0d61e226ed Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Tue, 1 Oct 2024 22:47:05 -0500 Subject: [PATCH 42/76] updates --- integration_tests/dbt_project.yml | 2 -- models/intermediate/int_zendesk__schedule_spine.sql | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index d17a6243..66c3d094 100644 
--- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -41,8 +41,6 @@ vars: models: +schema: "zendesk_{{ var('directed_schema','dev') }}" - zendesk_source: - +materialized: ephemeral seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 83d1afff..9402e876 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -454,4 +454,4 @@ with calendar_spine as ( ) select * -from split_multiweek_holidays +from final From 6163942b8612f800cb292e29b7f5bcebe7e9cffd Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 2 Oct 2024 10:43:43 -0500 Subject: [PATCH 43/76] update weeks spanned calc --- models/intermediate/int_zendesk__schedule_spine.sql | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 9402e876..7c6ba3fd 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -93,10 +93,8 @@ with calendar_spine as ( -- This first step is to find those holidays. select adjust_holiday_week_start.*, - -- calculate weeks the holiday range spans. Takes into account if the holiday extends into the next year. - (extract(week from holiday_valid_until) + extract(year from holiday_valid_until)) - - (extract(week from holiday_valid_from) + extract(year from holiday_valid_from)) - + 1 as holiday_weeks_spanned + -- calculate weeks the holiday range spans + {{ dbt.datediff('holiday_valid_from', 'holiday_valid_until', 'week') }} + 1 as holiday_weeks_spanned from adjust_holiday_week_start ), expanded_holidays as ( @@ -454,4 +452,4 @@ with calendar_spine as ( ) select * -from final +from holiday_multiple_weeks_check From c774f6dc8966f5e5d7676ed99df7f5023da5e89d Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 2 Oct 2024 10:58:14 -0500 Subject: [PATCH 44/76] update naming --- macros/extract_dow.sql | 2 +- .../int_zendesk__schedule_spine.sql | 53 +++++++++++-------- 2 files changed, 31 insertions(+), 24 deletions(-) diff --git a/macros/extract_dow.sql b/macros/extract_dow.sql index 313d8bbb..9763ab97 100644 --- a/macros/extract_dow.sql +++ b/macros/extract_dow.sql @@ -2,7 +2,7 @@ {{ return(adapter.dispatch('extract_dow', 'zendesk')(date_or_time)) }} {%- endmacro %} --- Snowflake and Postgres use DOW where Sunday = 0 +-- Snowflake, Redshift and Postgres use DOW where Sunday = 0 {% macro default__extract_dow(date_or_time) %} extract(dow from {{ date_or_time }}) {% endmacro %} diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 7c6ba3fd..95633494 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -6,21 +6,24 @@ */ with calendar_spine as ( - select cast(date_day as {{ dbt.type_timestamp() }} ) as date_day from {{ ref('int_zendesk__calendar_spine') }} ), schedule as ( - select * from {{ var('schedule') }} ), split_timezones as ( - select * from {{ ref('int_zendesk__timezone_daylight') }} +{% if var('using_holidays', True) %} +), schedule_holiday as ( + select * + from {{ var('schedule_holiday') }} +{% endif %} + ), 
calculate_schedules as ( select @@ -37,7 +40,7 @@ with calendar_spine as ( cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until, cast({{ dbt.date_trunc('week', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_week_start_date, cast({{ dbt.date_trunc('week', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_week_end_date, - -- check what dow warehouse truncates the week to + -- check what dow the warehouse truncates the week to {{ zendesk.extract_dow(dbt.date_trunc('week', 'split_timezones.valid_from')) }} as week_start_dow from schedule left join split_timezones @@ -61,7 +64,7 @@ with calendar_spine as ( from calculate_schedules {% if var('using_holidays', True) %} -), holiday as ( +), schedule_holiday_ranges as ( select holiday_name, schedule_id, @@ -73,7 +76,7 @@ with calendar_spine as ( ) }} as {{ dbt.type_timestamp() }}) as holiday_week_end_date, -- check what dow warehouse truncates the week to {{ zendesk.extract_dow(dbt.date_trunc('week', 'holiday_start_date_at')) }} as week_start_dow - from {{ var('schedule_holiday') }} + from schedule_holiday ), adjust_holiday_week_start as ( select @@ -86,7 +89,7 @@ with calendar_spine as ( as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, cast({{ dbt.dateadd('day', '-week_start_dow', 'holiday_week_end_date') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday - from holiday + from schedule_holiday_ranges ), holiday_multiple_weeks_check as ( -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. @@ -98,6 +101,7 @@ with calendar_spine as ( from adjust_holiday_week_start ), expanded_holidays as ( + -- this only needs to be run for holidays spanning multiple weeks select holiday_multiple_weeks_check.*, cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number @@ -130,22 +134,25 @@ with calendar_spine as ( case when holiday_week_number = 1 -- first week in multiweek holiday then holiday_valid_from - -- we have to use days in case of a wonky week trunc. + -- We have to use days in case warehouse does not truncate to Sunday. else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_valid_from, case when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday then holiday_valid_until + -- We have to use days in case warehouse does not truncate to Sunday. else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday end as holiday_valid_until, case when holiday_week_number = 1 -- first week in multiweek holiday then holiday_starting_sunday + -- We have to use days in case warehouse does not truncate to Sunday. else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_starting_sunday, case when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday then holiday_ending_sunday + -- We have to use days in case warehouse does not truncate to Sunday. 
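+                -- (holiday_starting_sunday is already Sunday-aligned, so stepping by multiples of 7 days
+                -- keeps that alignment without re-truncating to the warehouse's own week start)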
else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) end as holiday_ending_sunday, holiday_weeks_spanned @@ -153,7 +160,7 @@ with calendar_spine as ( where holiday_weeks_spanned > 1 -- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. -), schedule_holiday as ( +), schedule_holiday_spine as ( select split_multiweek_holidays.holiday_name, @@ -165,8 +172,8 @@ with calendar_spine as ( calendar_spine.date_day as holiday_date from split_multiweek_holidays inner join calendar_spine - on holiday_valid_from <= date_day - and holiday_valid_until >= date_day + on split_multiweek_holidays.holiday_valid_from <= calendar_spine.date_day + and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day {% endif %} ), join_holidays as ( @@ -183,12 +190,12 @@ with calendar_spine as ( adjust_schedule_week_start.schedule_ending_sunday, {% if var('using_holidays', True) %} - schedule_holiday.holiday_date, - schedule_holiday.holiday_name, - schedule_holiday.holiday_valid_from, - schedule_holiday.holiday_valid_until, - schedule_holiday.holiday_starting_sunday, - schedule_holiday.holiday_ending_sunday + schedule_holiday_spine.holiday_date, + schedule_holiday_spine.holiday_name, + schedule_holiday_spine.holiday_valid_from, + schedule_holiday_spine.holiday_valid_until, + schedule_holiday_spine.holiday_starting_sunday, + schedule_holiday_spine.holiday_ending_sunday {% else %} cast(null as {{ dbt.type_timestamp() }}) as holiday_date, cast(null as {{ dbt.type_string() }}) as holiday_name, @@ -201,10 +208,10 @@ with calendar_spine as ( from adjust_schedule_week_start {% if var('using_holidays', True) %} - left join schedule_holiday - on schedule_holiday.schedule_id = adjust_schedule_week_start.schedule_id - and schedule_holiday.holiday_date >= adjust_schedule_week_start.schedule_valid_from - and schedule_holiday.holiday_date < adjust_schedule_week_start.schedule_valid_until + left join schedule_holiday_spine + on schedule_holiday_spine.schedule_id = adjust_schedule_week_start.schedule_id + and schedule_holiday_spine.holiday_date >= adjust_schedule_week_start.schedule_valid_from + and schedule_holiday_spine.holiday_date < adjust_schedule_week_start.schedule_valid_until {% endif %} ), split_holidays as( @@ -427,7 +434,7 @@ with calendar_spine as ( union all - -- we want to count the number of records for each schedule start_time_utc and end_time_utc for comparison later + -- we want to count the number of records for each schedule start_time_utc and end_time_utc for filtering later select distinct *, cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) @@ -452,4 +459,4 @@ with calendar_spine as ( ) select * -from holiday_multiple_weeks_check +from final From b8a9f4a6b53c7bbd21886602acb0923359a4fa53 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 2 Oct 2024 20:17:52 -0500 Subject: [PATCH 45/76] update to dbt_date --- .../int_zendesk__schedule_spine.sql | 132 +++++++----------- 1 file changed, 48 insertions(+), 84 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 95633494..d89c263e 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -7,7 +7,7 @@ with calendar_spine 
as ( select - cast(date_day as {{ dbt.type_timestamp() }} ) as date_day + cast(date_day as {{ dbt.type_timestamp() }}) as date_day from {{ ref('int_zendesk__calendar_spine') }} ), schedule as ( @@ -35,70 +35,34 @@ with calendar_spine as ( schedule.schedule_name, schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - -- we'll use these to determine which schedule version to associate tickets with + -- we'll use these to determine which schedule version to associate tickets with. cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from, cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until, - cast({{ dbt.date_trunc('week', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_week_start_date, - cast({{ dbt.date_trunc('week', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_week_end_date, - -- check what dow the warehouse truncates the week to - {{ zendesk.extract_dow(dbt.date_trunc('week', 'split_timezones.valid_from')) }} as week_start_dow + cast({{ dbt_date.week_start('split_timezones.valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt_date.week_start('split_timezones.valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday from schedule left join split_timezones on split_timezones.time_zone = lower(schedule.time_zone) -), adjust_schedule_week_start as ( - select - schedule_id, - time_zone, - offset_minutes, - start_time_utc, - end_time_utc, - schedule_name, - schedule_valid_from, - schedule_valid_until, - -- This adjusts to Sunday when the warehouse is not truncating to Sunday. 
- cast({{ dbt.dateadd('day', '-week_start_dow', 'schedule_week_start_date') }} - as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt.dateadd('day', '-week_start_dow', 'schedule_week_end_date') }} - as {{ dbt.type_timestamp() }}) as schedule_ending_sunday - from calculate_schedules - {% if var('using_holidays', True) %} ), schedule_holiday_ranges as ( select holiday_name, schedule_id, - cast(holiday_start_date_at as {{ dbt.type_timestamp() }} ) as holiday_valid_from, - cast(holiday_end_date_at as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast({{ dbt.date_trunc('week', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_week_start_date, - cast({{ dbt.dateadd('week', 1, dbt.date_trunc( - 'week', 'holiday_end_date_at') - ) }} as {{ dbt.type_timestamp() }}) as holiday_week_end_date, - -- check what dow warehouse truncates the week to - {{ zendesk.extract_dow(dbt.date_trunc('week', 'holiday_start_date_at')) }} as week_start_dow + cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, + cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday from schedule_holiday -), adjust_holiday_week_start as ( - select - holiday_name, - schedule_id, - holiday_valid_from, - holiday_valid_until, - -- adjusts to Sunday when the database is not truncating to Sunday - cast({{ dbt.dateadd('day', '-week_start_dow', 'holiday_week_start_date') }} - as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt.dateadd('day', '-week_start_dow', 'holiday_week_end_date') }} - as {{ dbt.type_timestamp() }}) as holiday_ending_sunday - from schedule_holiday_ranges - ), holiday_multiple_weeks_check as ( -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. -- This first step is to find those holidays. 
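    -- e.g., a holiday running Friday through the following Tuesday typically crosses one week boundary,
    -- so holiday_weeks_spanned = 1 + 1 = 2 and it is split into two weekly rows downstream.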
select - adjust_holiday_week_start.*, + schedule_holiday_ranges.*, -- calculate weeks the holiday range spans {{ dbt.datediff('holiday_valid_from', 'holiday_valid_until', 'week') }} + 1 as holiday_weeks_spanned - from adjust_holiday_week_start + from schedule_holiday_ranges ), expanded_holidays as ( -- this only needs to be run for holidays spanning multiple weeks @@ -178,16 +142,16 @@ with calendar_spine as ( ), join_holidays as ( select - adjust_schedule_week_start.schedule_id, - adjust_schedule_week_start.time_zone, - adjust_schedule_week_start.offset_minutes, - adjust_schedule_week_start.start_time_utc, - adjust_schedule_week_start.end_time_utc, - adjust_schedule_week_start.schedule_name, - adjust_schedule_week_start.schedule_valid_from, - adjust_schedule_week_start.schedule_valid_until, - adjust_schedule_week_start.schedule_starting_sunday, - adjust_schedule_week_start.schedule_ending_sunday, + calculate_schedules.schedule_id, + calculate_schedules.time_zone, + calculate_schedules.offset_minutes, + calculate_schedules.start_time_utc, + calculate_schedules.end_time_utc, + calculate_schedules.schedule_name, + calculate_schedules.schedule_valid_from, + calculate_schedules.schedule_valid_until, + calculate_schedules.schedule_starting_sunday, + calculate_schedules.schedule_ending_sunday, {% if var('using_holidays', True) %} schedule_holiday_spine.holiday_date, @@ -205,13 +169,13 @@ with calendar_spine as ( cast(null as {{ dbt.type_timestamp() }}) as holiday_ending_sunday {% endif %} - from adjust_schedule_week_start + from calculate_schedules {% if var('using_holidays', True) %} left join schedule_holiday_spine - on schedule_holiday_spine.schedule_id = adjust_schedule_week_start.schedule_id - and schedule_holiday_spine.holiday_date >= adjust_schedule_week_start.schedule_valid_from - and schedule_holiday_spine.holiday_date < adjust_schedule_week_start.schedule_valid_until + on schedule_holiday_spine.schedule_id = calculate_schedules.schedule_id + and schedule_holiday_spine.holiday_date >= calculate_schedules.schedule_valid_from + and schedule_holiday_spine.holiday_date < calculate_schedules.schedule_valid_until {% endif %} ), split_holidays as( @@ -220,7 +184,7 @@ with calendar_spine as ( join_holidays.*, case when holiday_valid_from = holiday_date - then '0_start' -- the number is for ordering later + then '0_gap' -- the number is for ordering later end as holiday_start_or_end, schedule_valid_from as valid_from, holiday_date as valid_until @@ -234,7 +198,7 @@ with calendar_spine as ( join_holidays.*, case when holiday_valid_until = holiday_date - then '1_end' -- the number is for ordering later + then '1_holiday' -- the number is for ordering later end as holiday_start_or_end, holiday_date as valid_from, schedule_valid_until as valid_until @@ -341,9 +305,9 @@ with calendar_spine as ( case when holiday_start_or_end = 'partition_start' then schedule_starting_sunday - when holiday_start_or_end = '0_start' + when holiday_start_or_end = '0_gap' then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) - when holiday_start_or_end = '1_end' + when holiday_start_or_end = '1_holiday' then holiday_starting_sunday when holiday_start_or_end = 'partition_end' then holiday_ending_sunday @@ -353,9 +317,9 @@ with calendar_spine as ( case when holiday_start_or_end = 'partition_start' then holiday_starting_sunday - when holiday_start_or_end = '0_start' + when holiday_start_or_end = '0_gap' then lead(holiday_starting_sunday) over (partition 
by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index) - when holiday_start_or_end = '1_end' + when holiday_start_or_end = '1_holiday' then holiday_ending_sunday when holiday_start_or_end = 'partition_end' then schedule_ending_sunday @@ -378,7 +342,8 @@ with calendar_spine as ( holiday_ending_sunday, valid_from, valid_until, - case when holiday_start_or_end = '1_end' then true + case when holiday_start_or_end = '1_holiday' + then true else false end as is_holiday_week from adjust_ranges @@ -389,21 +354,18 @@ with calendar_spine as ( holiday_weeks.*, -- Calculate holiday_valid_from in minutes from week start - case when is_holiday_week then ( - ({{ zendesk.extract_dow('holiday_valid_from') }} * 24 * 60) - + extract(hour from holiday_valid_from) * 60 -- Get hours and convert to minutes - + extract(minute from holiday_valid_from) -- Get minutes - - offset_minutes -- Timezone adjustment - ) else null + case when is_holiday_week + then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_from', 'minute') }} + - offset_minutes) -- timezone adjustment + else null end as holiday_valid_from_minutes_from_week_start, - + -- Calculate holiday_valid_until in minutes from week start - case when is_holiday_week then ( - (({{ zendesk.extract_dow('holiday_valid_until') }} + 1) * 24 * 60) -- add 1 day to set the upper bound of the holiday - + extract(hour from holiday_valid_until) * 60 - + extract(minute from holiday_valid_until) - - offset_minutes - ) else null + case when is_holiday_week + then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }} + + 24 * 60 -- add 1 day to set the upper bound of the holiday + - offset_minutes)-- timezone adjustment + else null end as holiday_valid_until_minutes_from_week_start from holiday_weeks @@ -451,11 +413,13 @@ with calendar_spine as ( end_time_utc, is_holiday_week from filter_holidays - -- This filter is for multiple holiday ids in 1 week. We want to check for each schedule start_time_utc and end_time_utc - -- that the holiday count matches the number of distinct records. - -- When rows that don't match, that indicates there is a holiday on that day, and we'll filter them out. + + -- This filter ensures that for each schedule, the count of holidays in a week matches the number + -- of distinct schedule records with the same start_time_utc and end_time_utc. + -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record. + -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null. 
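+    -- e.g., if a week contains 2 holidays, a given start_time_utc/end_time_utc slot must appear in 2 records
+    -- (one per holiday) to be kept; a slot appearing only once overlapped a holiday and is removed.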
where number_holidays_in_week = number_records_for_schedule_start_end - and holiday_name is null -- this will remove schedules that fall on a holiday + and holiday_name is null ) select * From cfcd106fef9a83ba69677b6bf04505cedf6a2eb1 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 2 Oct 2024 20:34:31 -0500 Subject: [PATCH 46/76] update comments --- .../int_zendesk__schedule_spine.sql | 54 +++---------------- 1 file changed, 7 insertions(+), 47 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index d89c263e..5acb6c56 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -185,9 +185,7 @@ with calendar_spine as ( case when holiday_valid_from = holiday_date then '0_gap' -- the number is for ordering later - end as holiday_start_or_end, - schedule_valid_from as valid_from, - holiday_date as valid_until + end as holiday_start_or_end from join_holidays where holiday_date is not null @@ -199,9 +197,7 @@ with calendar_spine as ( case when holiday_valid_until = holiday_date then '1_holiday' -- the number is for ordering later - end as holiday_start_or_end, - holiday_date as valid_from, - schedule_valid_until as valid_until + end as holiday_start_or_end from join_holidays where holiday_date is not null @@ -210,9 +206,7 @@ with calendar_spine as ( -- keep records for weeks with no holiday select join_holidays.*, - cast(null as {{ dbt.type_string() }}) as holiday_start_or_end, - schedule_valid_from as valid_from, - schedule_valid_until as valid_until + cast(null as {{ dbt.type_string() }}) as holiday_start_or_end from join_holidays where holiday_date is null @@ -246,8 +240,6 @@ with calendar_spine as ( then 'partition_start' else holiday_start_or_end end as holiday_start_or_end, - valid_from, - valid_until, valid_from_index, max_valid_from_index from valid_from_partition @@ -273,8 +265,6 @@ with calendar_spine as ( holiday_starting_sunday, holiday_ending_sunday, 'partition_end' as holiday_start_or_end, - valid_from, - valid_until, max_valid_from_index + 1 as valid_from_index, max_valid_from_index from valid_from_partition @@ -283,25 +273,7 @@ with calendar_spine as ( ), adjust_ranges as( select - schedule_id, - time_zone, - offset_minutes, - start_time_utc, - end_time_utc, - schedule_name, - holiday_name, - holiday_date, - holiday_valid_from, - holiday_valid_until, - holiday_starting_sunday, - holiday_ending_sunday, - schedule_valid_from, - schedule_valid_until, - schedule_starting_sunday, - schedule_ending_sunday, - valid_from_index, - max_valid_from_index, - holiday_start_or_end, + add_partition_end_row.*, case when holiday_start_or_end = 'partition_start' then schedule_starting_sunday @@ -312,8 +284,7 @@ with calendar_spine as ( when holiday_start_or_end = 'partition_end' then holiday_ending_sunday else schedule_starting_sunday - end as valid_from - , + end as valid_from, case when holiday_start_or_end = 'partition_start' then holiday_starting_sunday @@ -329,24 +300,13 @@ with calendar_spine as ( ), holiday_weeks as( select - schedule_id, - time_zone, - offset_minutes, - start_time_utc, - end_time_utc, - schedule_name, - holiday_name, - holiday_valid_from, - holiday_valid_until, - holiday_starting_sunday, - holiday_ending_sunday, - valid_from, - valid_until, + adjust_ranges.*, case when holiday_start_or_end = '1_holiday' then true else false end as is_holiday_week from 
adjust_ranges + -- filter out irrelevant records where not (valid_from >= valid_until and holiday_date is not null) ), valid_minutes as( From 781f4fda0804e94dc532363d0b8ebb4a64f3b76e Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 2 Oct 2024 23:00:54 -0500 Subject: [PATCH 47/76] update to dbt date weekstart --- .../int_zendesk__schedule_spine.sql | 41 +++++++++---------- ...endesk__agent_work_time_business_hours.sql | 3 +- ...int_zendesk__reply_time_business_hours.sql | 2 +- ...sk__requester_wait_time_business_hours.sql | 2 +- 4 files changed, 22 insertions(+), 26 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 5acb6c56..25f0d969 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -18,14 +18,7 @@ with calendar_spine as ( select * from {{ ref('int_zendesk__timezone_daylight') }} -{% if var('using_holidays', True) %} -), schedule_holiday as ( - select * - from {{ var('schedule_holiday') }} -{% endif %} - -), calculate_schedules as ( - +), schedule_timezones as ( select schedule.schedule_id, lower(schedule.time_zone) as time_zone, @@ -45,6 +38,10 @@ with calendar_spine as ( on split_timezones.time_zone = lower(schedule.time_zone) {% if var('using_holidays', True) %} +), schedule_holiday as ( + select * + from {{ var('schedule_holiday') }} + ), schedule_holiday_ranges as ( select holiday_name, @@ -142,16 +139,16 @@ with calendar_spine as ( ), join_holidays as ( select - calculate_schedules.schedule_id, - calculate_schedules.time_zone, - calculate_schedules.offset_minutes, - calculate_schedules.start_time_utc, - calculate_schedules.end_time_utc, - calculate_schedules.schedule_name, - calculate_schedules.schedule_valid_from, - calculate_schedules.schedule_valid_until, - calculate_schedules.schedule_starting_sunday, - calculate_schedules.schedule_ending_sunday, + schedule_timezones.schedule_id, + schedule_timezones.time_zone, + schedule_timezones.offset_minutes, + schedule_timezones.start_time_utc, + schedule_timezones.end_time_utc, + schedule_timezones.schedule_name, + schedule_timezones.schedule_valid_from, + schedule_timezones.schedule_valid_until, + schedule_timezones.schedule_starting_sunday, + schedule_timezones.schedule_ending_sunday, {% if var('using_holidays', True) %} schedule_holiday_spine.holiday_date, @@ -169,13 +166,13 @@ with calendar_spine as ( cast(null as {{ dbt.type_timestamp() }}) as holiday_ending_sunday {% endif %} - from calculate_schedules + from schedule_timezones {% if var('using_holidays', True) %} left join schedule_holiday_spine - on schedule_holiday_spine.schedule_id = calculate_schedules.schedule_id - and schedule_holiday_spine.holiday_date >= calculate_schedules.schedule_valid_from - and schedule_holiday_spine.holiday_date < calculate_schedules.schedule_valid_until + on schedule_holiday_spine.schedule_id = schedule_timezones.schedule_id + and schedule_holiday_spine.holiday_date >= schedule_timezones.schedule_valid_from + and schedule_holiday_spine.holiday_date < schedule_timezones.schedule_valid_until {% endif %} ), split_holidays as( diff --git a/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql b/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql index fe9552b6..4078c68b 100644 --- a/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql +++ 
b/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql @@ -182,10 +182,9 @@ with agent_work_time_filtered_statuses as ( {{ fivetran_utils.timestamp_add( "minute", "cast(((7*24*60) * week_number) + breach_minutes_from_week as " ~ dbt.type_int() ~ " )", - "" ~ dbt.date_trunc('week', 'valid_starting_at') ~ "", + "cast(" ~ dbt_date.week_start('valid_starting_at','UTC') ~ " as " ~ dbt.type_timestamp() ~ " )" ) }} as sla_breach_at from intercepted_periods_agent_filtered - ) select * diff --git a/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql b/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql index d977afd0..67a191fa 100644 --- a/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql +++ b/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql @@ -173,7 +173,7 @@ with ticket_schedules as ( select *, schedule_end_time + remaining_minutes as breached_at_minutes, - {{ dbt.date_trunc('week', 'sla_applied_at') }} as starting_point, + {{ dbt_date.week_start('sla_applied_at','UTC') }} as starting_point, {{ fivetran_utils.timestamp_add( "minute", "cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as " ~ dbt.type_int() ~ " )", diff --git a/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql b/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql index 2da004bc..4d8eb172 100644 --- a/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql +++ b/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql @@ -182,7 +182,7 @@ with requester_wait_time_filtered_statuses as ( {{ fivetran_utils.timestamp_add( "minute", "cast(((7*24*60) * week_number) + breach_minutes_from_week as " ~ dbt.type_int() ~ " )", - "" ~ dbt.date_trunc('week', 'valid_starting_at') ~ "", + "cast(" ~ dbt_date.week_start('valid_starting_at','UTC') ~ " as " ~ dbt.type_timestamp() ~ " )" ) }} as sla_breach_at from intercepted_periods_agent_filtered From 28457e81166a162db1940189fd95d705b88f7c77 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Thu, 3 Oct 2024 14:31:16 -0500 Subject: [PATCH 48/76] modernize calendar spine --- dbt_project.yml | 2 +- macros/clean_schedule.sql | 4 ++ macros/extract_dow.sql | 17 ----- .../history/int_zendesk__schedule_history.sql | 71 +++++-------------- models/utils/int_zendesk__calendar_spine.sql | 51 +++++++------ 5 files changed, 49 insertions(+), 96 deletions(-) delete mode 100644 macros/extract_dow.sql diff --git a/dbt_project.yml b/dbt_project.yml index ebfa3deb..587914fb 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -28,7 +28,7 @@ models: +schema: zendesk_unstructured +materialized: table utils: - +materialized: ephemeral + +materialized: table vars: zendesk: ticket_field_history_columns: ['assignee_id', 'status', 'priority'] diff --git a/macros/clean_schedule.sql b/macros/clean_schedule.sql index 17d54ecb..3445f72c 100644 --- a/macros/clean_schedule.sql +++ b/macros/clean_schedule.sql @@ -1,3 +1,7 @@ {% macro clean_schedule(column_name) -%} + {{ return(adapter.dispatch('clean_schedule', 'zendesk')(column_name)) }} +{%- endmacro %} + +{% macro default__clean_schedule(column_name) -%} replace(replace(replace(replace({{ column_name }}, '{', ''), '}', ''), '"', ''), ' ', '') {%- endmacro %} \ No newline at end of file diff --git a/macros/extract_dow.sql 
b/macros/extract_dow.sql
deleted file mode 100644
index 9763ab97..00000000
--- a/macros/extract_dow.sql
+++ /dev/null
@@ -1,17 +0,0 @@
-{% macro extract_dow(date_or_time) -%}
-    {{ return(adapter.dispatch('extract_dow', 'zendesk')(date_or_time)) }}
-{%- endmacro %}
-
--- Snowflake, Redshift and Postgres use DOW where Sunday = 0
-{% macro default__extract_dow(date_or_time) %}
-    extract(dow from {{ date_or_time }})
-{% endmacro %}
-
--- BigQuery and Databricks use DAYOFWEEK where Sunday = 1, so subtract 1 to make Sunday = 0
-{% macro bigquery__extract_dow(date_or_time) %}
-    (extract(dayofweek from {{ date_or_time }}) - 1)
-{% endmacro %}
-
-{% macro spark__extract_dow(date_or_time) %}
-    (extract(dayofweek from {{ date_or_time }}) - 1)
-{% endmacro %}
\ No newline at end of file
diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql
index 523ab5e8..8f5aea12 100644
--- a/models/history/int_zendesk__schedule_history.sql
+++ b/models/history/int_zendesk__schedule_history.sql
@@ -1,25 +1,17 @@
-{{ config(enabled=var('using_schedules', True)) }}
+{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', True)) }}

-with schedule as (
-
-    select *
-    from {{ var('schedule') }}
-
-), audit_logs as (
+with audit_logs as (
    select
-        _fivetran_synced,
-        source_id as schedule_id,
+        cast(source_id as {{ dbt.type_string() }}) as schedule_id,
        created_at,
        lower(change_description) as change_description
    from {{ var('audit_log') }}
    where lower(change_description) like '%workweek changed from%'

), audit_logs_enhanced as (
-    select
-        _fivetran_synced,
+    select 
        schedule_id,
        created_at,
-        min(created_at) over (partition by schedule_id) as min_created_at,
        replace(replace(replace(replace(change_description,
            'workweek changed from', ''),
            '&quot;', '"'),
            'amp;', ''),
            '=&gt;', ':')
            as change_description_cleaned
    from audit_logs

), split_to_from as (
-    -- 'from' establishes the schedule from before the change occurred
    select
-        audit_logs_enhanced.*,
-        cast('1970-01-01' as {{ dbt.type_timestamp() }}) as valid_from,
-        created_at as valid_until,
-        {{ dbt.split_part('change_description_cleaned', "' to '", 1) }} as schedule_change,
-        'from' as change_type -- remove before release but helpful for debugging
+        schedule_id,
+        created_at as valid_from,
+        lead(created_at) over (
+            partition by schedule_id order by created_at) as valid_until,
+        -- we only need what the schedule was changed to
+        {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change
    from audit_logs_enhanced
-    where created_at = min_created_at -- the 'from' portion only matters for the first row
-
-    union all
-
-    -- 'to'
-    select
-        audit_logs_enhanced.*,
-        created_at as valid_from,
-        coalesce(
-            lead(created_at) over (
-                partition by schedule_id order by created_at),
-            {{ dbt.current_timestamp_backcompat() }})
-            as valid_until,
-        {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change,
-        'to' as change_type -- remove before release but helpful for debugging
-    from audit_logs_enhanced
+), consolidate_same_day_changes as (
+    select
+        split_to_from.*
+    from split_to_from
+    -- Filter out schedules with multiple changes in a day to keep the current one
+    where cast(valid_from as date) != cast(valid_until as date)
+        and valid_until is not null

), split_days as (
    {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %}
    {% for day, day_number in days_of_week.items() %}
    select
-        split_to_from.*,
+        consolidate_same_day_changes.*,
        '{{ day }}' 
as day_of_week, cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, {{ zendesk.regex_extract('schedule_change', "'.*?" ~ day ~ ".*?({.*?})'") }} as day_of_week_schedule - from split_to_from + from consolidate_same_day_changes {% if not loop.last %}union all{% endif %} {% endfor %} @@ -116,7 +100,6 @@ with schedule as ( ), calculate_start_end_times as ( select - _fivetran_synced, schedule_id, start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time, end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, @@ -127,23 +110,7 @@ with schedule as ( day_of_week, day_of_week_number from split_times - -), final as ( - select - schedule_id, - start_time, - end_time, - day_of_week, - day_of_week_number, - valid_from_day, - valid_until_day, - -- want to consolidate multiple user changes that don't result in a true schedule change. - min(valid_from) as valid_from, - max(valid_until) as valid_until - from calculate_start_end_times - {{ dbt_utils.group_by(7) }} - ) select * -from calculate_start_end_times \ No newline at end of file +from calculate_start_end_times diff --git a/models/utils/int_zendesk__calendar_spine.sql b/models/utils/int_zendesk__calendar_spine.sql index d5f4b444..2e897d6b 100644 --- a/models/utils/int_zendesk__calendar_spine.sql +++ b/models/utils/int_zendesk__calendar_spine.sql @@ -1,42 +1,41 @@ --- depends_on: {{ source('zendesk', 'ticket') }} - +-- depends_on: {{ var('ticket') }} with spine as ( - {% if execute %} - {% set current_ts = dbt.current_timestamp_backcompat() %} - {% set first_date_query %} - select min( created_at ) as min_date from {{ source('zendesk', 'ticket') }} - -- by default take all the data - where cast(created_at as date) >= {{ dbt.dateadd('year', - var('ticket_field_history_timeframe_years', 50), current_ts ) }} - {% endset %} - - {% set first_date = run_query(first_date_query).columns[0][0]|string %} - - {% if target.type == 'postgres' %} - {% set first_date_adjust = "cast('" ~ first_date[0:10] ~ "' as date)" %} - - {% else %} - {% set first_date_adjust = "'" ~ first_date[0:10] ~ "'" %} - - {% endif %} + {% if execute and flags.WHICH in ('run', 'build') %} + + {%- set first_date_query %} + select + coalesce( + min(cast(created_at as date)), + cast({{ dbt.dateadd("month", -1, "current_date") }} as date) + ) as min_date + from {{ var('ticket') }} + -- by default take all the data + where cast(created_at as date) >= {{ dbt.dateadd('year', + - var('ticket_field_history_timeframe_years', 50), "current_date") }} + {% endset -%} + + {% else %} -- {% set first_date_adjust = "2016-01-01" %} + {%- set first_date_query%} + select cast({{ dbt.dateadd("month", -1, "current_date") }} as date) + {% endset -%} - {% else %} {% set first_date_adjust = "2016-01-01" %} {% endif %} - + {%- set first_date = dbt_utils.get_single_value(first_date_query) %} + {{ dbt_utils.date_spine( datepart = "day", - start_date = first_date_adjust, - end_date = dbt.dateadd("week", 52, "current_date") + start_date = "cast('" ~ first_date ~ "' as date)", + end_date = dbt.dateadd("week", 1, "current_date") ) }} ), recast as ( - - select cast(date_day as date) as date_day + select + cast(date_day as date) as date_day from spine - ) select * From c117df971ca103fdf66471ca6f691e67710a1006 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Thu, 3 Oct 2024 16:04:29 -0700 Subject: [PATCH 49/76] make sure we're working with strings when replacing --- macros/clean_schedule.sql | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/clean_schedule.sql b/macros/clean_schedule.sql index 17d54ecb..8046003b 100644 --- a/macros/clean_schedule.sql +++ b/macros/clean_schedule.sql @@ -1,3 +1,3 @@ {% macro clean_schedule(column_name) -%} - replace(replace(replace(replace({{ column_name }}, '{', ''), '}', ''), '"', ''), ' ', '') + replace(replace(replace(replace(cast({{ column_name }} as {{ dbt.type_string() }}), '{', ''), '}', ''), '"', ''), ' ', '') {%- endmacro %} \ No newline at end of file From 933d62e1cad70a5ea96cdd260ff7eb4823d52b96 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Thu, 3 Oct 2024 18:27:29 -0700 Subject: [PATCH 50/76] let's see if bk works --- .buildkite/scripts/run_models.sh | 2 +- integration_tests/dbt_project.yml | 8 +- macros/regex_extract.sql | 33 +++---- .../history/int_zendesk__schedule_history.sql | 85 ++++++------------- 4 files changed, 47 insertions(+), 81 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index 876bafee..9b0d783a 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -18,7 +18,7 @@ cd integration_tests dbt deps dbt seed --target "$db" --full-refresh dbt run -m +int_zendesk__schedule_history --target "$db" --full-refresh -dbt test --target "$db" +# dbt test --target "$db" # dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh # dbt test --target "$db" diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 66c3d094..97baa339 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -7,6 +7,8 @@ profile: 'integration_tests' vars: zendesk_schema: zendesk_integration_tests_50 + using_schedule_histories: true + using_schedules: true zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" @@ -39,14 +41,12 @@ vars: # fivetran_consistency_ticket_metrics_exclusion_tickets: (11092,11093,11094) # fivetran_integrity_sla_count_match_tickets: (76) -models: - +schema: "zendesk_{{ var('directed_schema','dev') }}" +# models: +# +schema: "zendesk_{{ var('directed_schema','dev') }}" seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" zendesk_integration_tests: - +column_types: - _fivetran_synced: timestamp +column_types: _fivetran_synced: timestamp group_data: diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index bb7f83a6..9fc42ded 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -1,42 +1,45 @@ -{% macro regex_extract(string, regex) -%} +{% macro regex_extract(string, day) -%} -{{ adapter.dispatch('regex_extract', 'zendesk') (string, regex) }} +{{ adapter.dispatch('regex_extract', 'zendesk') (string, day) }} {%- endmacro %} -{% macro default__regex_extract(string, regex) %} - +{% macro default__regex_extract(string, day) %} + {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %} regexp_extract({{ string }}, {{ regex }} ) {%- endmacro %} -{% macro bigquery__regex_extract(string, regex) %} - +{% macro bigquery__regex_extract(string, day) %} + {% set regex = "'.*?" 
~ day ~ ".*?({.*?})'" %}
    regexp_extract({{ string }}, {{ regex }} )

{%- endmacro %}


{% macro snowflake__regex_extract(string, day) %}
+    {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}

    REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 )

{%- endmacro %}


{% macro postgres__regex_extract(string, day) %}
+    {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %}

-    (regexp_matches({{ string }}, {{ regex }}))[1]
+    (regexp_matches({{ string }}, {{ day }}))[1]

{%- endmacro %}


{% macro redshift__regex_extract(string, day) %}
+
+    {% set regex = '"' ~ day ~ '"' ~ ':\\\{([^\\\}]*)\\\}' -%}

-    {% set reformatted_regex = regex | replace(".*?", ".*") | replace("{", "\\\{") | replace("}", "\\\}") -%}
-    REGEXP_SUBSTR({{ string }}, {{ reformatted_regex }}, 1, 1, 'e')
+    '{' || REGEXP_SUBSTR({{ string }}, '{{ regex }}', 1, 1, 'e') || '}'

{%- endmacro %}

-{% macro spark__regex_extract(string, regex) %}
-    {% set reformatted_regex = regex | replace("{", "\\\{") | replace("}", "\\\}") -%}
-    regexp_extract({{ string }}, {{ reformatted_regex }}, 1)
+{% macro spark__regex_extract(string, day) %}
+    {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" | replace("{", "\\\{") | replace("}", "\\\}") %}
+    regexp_extract({{ string }}, {{ regex }}, 1)
{%- endmacro %}
\ No newline at end of file
diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql
index b05462a3..d79c5e34 100644
--- a/models/history/int_zendesk__schedule_history.sql
+++ b/models/history/int_zendesk__schedule_history.sql
@@ -1,14 +1,8 @@
-{{ config(enabled=var('using_schedules', True)) }}
+{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', True)) }}

-with schedule as (
-
-    select *
-
-    from {{ var('schedule') }}
-
-), audit_logs as (
+with audit_logs as (
    select
-        _fivetran_synced,
-        source_id as schedule_id,
+        cast(source_id as {{ dbt.type_string() }}) as schedule_id,
        created_at,
        lower(change_description) as change_description
    from {{ var('audit_log') }}
    where lower(change_description) like '%workweek changed from%'

), audit_logs_enhanced as (
    select
-        _fivetran_synced,
        schedule_id,
        created_at,
-        min(created_at) over (partition by schedule_id) as min_created_at,
-        replace(replace(replace(replace(change_description,
+        replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,
            'workweek changed from', ''),
            '&quot;', '"'),
            'amp;', ''),
-            '=&gt;', ':')
+            '=&gt;', ':'), ':mon:', '"mon":'), ':tue:', '"tue":'), ':wed:', '"wed":'), ':thu:', '"thu":'), ':fri:', '"fri":'), ':sat:', '"sat":'), ':sun:', '"sun":')
            as change_description_cleaned
    from audit_logs

), split_to_from as (
-    -- 'from' establishes the schedule from before the change occurred
    select
-        audit_logs_enhanced.*,
-        cast('1970-01-01' as {{ dbt.type_timestamp() }}) as valid_from,
-        created_at as valid_until,
-        {{ dbt.split_part('change_description_cleaned', "' to '", 1) }} as schedule_change,
-        'from' as change_type -- remove before release but helpful for debugging
+        schedule_id,
+        created_at as valid_from,
+        lead(created_at) over (
+            partition by schedule_id order by created_at) as valid_until,
+        -- we only need what the schedule was changed to
+        {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change
    from audit_logs_enhanced
-    where created_at = min_created_at -- the 'from' portion only matters for the first row
-
-    union all
-
-    -- 'to'
+), consolidate_same_day_changes as ( select - audit_logs_enhanced.*, - created_at as valid_from, - coalesce( - lead(created_at) over ( - partition by schedule_id order by created_at), - {{ dbt.current_timestamp_backcompat() }}) - as valid_until, - {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change, - 'to' as change_type -- remove before release but helpful for debugging - from audit_logs_enhanced + split_to_from.* + from split_to_from + -- Filter out schedules with multiple changes in a day to keep the current one + -- where cast(valid_from as date) != cast(valid_until as date) + -- and valid_until is not null ), split_days as ( {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} {% for day, day_number in days_of_week.items() %} select - split_to_from.*, + consolidate_same_day_changes.*, '{{ day }}' as day_of_week, cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, - {{ zendesk.regex_extract('schedule_change', "'.*?" ~ day ~ ".*?({.*?})'") }} as day_of_week_schedule - from split_to_from + {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule + from consolidate_same_day_changes {% if not loop.last %}union all{% endif %} {% endfor %} -{% if target.type == 'redshift '%} +{% if target.type == 'redshift' %} -- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE ), redshift_parse_schedule as ( -- Redshift requires another CTE for unnesting select - _fivetran_synced, schedule_id, - created_at, - min_created_at, - change_description, - change_description_cleaned, valid_from, valid_until, schedule_change, - change_type, day_of_week, day_of_week_number, day_of_week_schedule, - json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule + json_parse('[' || replace(replace(day_of_week_schedule, ', ', ','), ',', '},{') || ']') as json_schedule from split_days + where day_of_week_schedule != '{}' ), unnested_schedules as ( select - _fivetran_synced, schedule_id, - created_at, - min_created_at, - change_description, - change_description_cleaned, valid_from, valid_until, schedule_change, - change_type, day_of_week, day_of_week_number, -- go back to strings cast(day_of_week_schedule as {{ dbt.type_string() }}) as day_of_week_schedule, - {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule + {{ clean_schedule('JSON_SERIALIZE(unnested_schedule)') }} as cleaned_unnested_schedule from redshift_parse_schedule as schedules, schedules.json_schedule as unnested_schedule @@ -133,15 +106,6 @@ with schedule as ( from split_days lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule - {# {%- elif target.type == 'redshift' %} - - json_parse('[' || replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{') || ']') as json_schedule - from split_days - cross join lateral json_parse(replace(replace(day_of_week_schedule, '\}\}', '\}'), '\{\{', '\{')) as element - - cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule - from split_days #} - {% else %} cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule from split_days @@ -162,7 +126,6 @@ with schedule as ( ), final as ( select - _fivetran_synced, schedule_id, start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time, end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, From 
0aacff491f18da48b703d665ffbe9424e70a899f Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Thu, 3 Oct 2024 18:33:23 -0700 Subject: [PATCH 51/76] postgres? --- macros/regex_extract.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index 9fc42ded..cf3541ac 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -26,7 +26,7 @@ {% macro postgres__regex_extract(string, day) %} {% set regex = "'.*?" ~ day ~ ".*?({.*?})'" %} - (regexp_matches({{ string }}, {{ day }}))[1] + (regexp_matches({{ string }}, '{{ regex }}'))[1] {%- endmacro %} From e39919e9e8e85e95d7564ba77a8757c94e405caf Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 04:54:51 -0500 Subject: [PATCH 52/76] i think schedule history may actually be workingggg --- dbt_project.yml | 2 +- .../history/int_zendesk__schedule_history.sql | 75 +++++-- .../int_zendesk__schedule_spine.sql | 31 +-- .../int_zendesk__schedule_timezones.sql | 195 ++++++++++++++++++ 4 files changed, 270 insertions(+), 33 deletions(-) create mode 100644 models/intermediate/int_zendesk__schedule_timezones.sql diff --git a/dbt_project.yml b/dbt_project.yml index 587914fb..ebfa3deb 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -28,7 +28,7 @@ models: +schema: zendesk_unstructured +materialized: table utils: - +materialized: table + +materialized: ephemeral vars: zendesk: ticket_field_history_columns: ['assignee_id', 'status', 'priority'] diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index 8f5aea12..a676f775 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -1,4 +1,4 @@ -{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', True)) }} +{{ config(enabled=var('using_schedules', true) and var('using_schedule_histories', true)) }} with audit_logs as ( select @@ -11,6 +11,7 @@ with audit_logs as ( ), audit_logs_enhanced as ( select schedule_id, + row_number() over (partition by schedule_id order by created_at) as schedule_id_index, created_at, replace(replace(replace(replace(change_description, 'workweek changed from', ''), @@ -23,30 +24,83 @@ with audit_logs as ( ), split_to_from as ( select schedule_id, + schedule_id_index, created_at as valid_from, lead(created_at) over ( - partition by schedule_id order by created_at) as valid_until, + partition by schedule_id order by schedule_id_index) as valid_until, -- we only need what the schedule was changed to {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change from audit_logs_enhanced -), consolidate_same_day_changes as ( +), find_same_day_changes as ( select - split_to_from.* + schedule_id, + schedule_id_index, + cast(valid_from as date) as valid_from, + cast(valid_until as date) as valid_until, + schedule_change, + row_number() over ( + partition by schedule_id, cast(valid_from as date) + -- ordering to get the latest change when there are multiple on one day + order by valid_from desc, coalesce(valid_until, {{ dbt.current_timestamp_backcompat() }}) desc + ) as row_number from split_to_from - -- Filter out schedules with multiple changes in a day to keep the current one - where cast(valid_from as date) != cast(valid_until as date) + +), consolidate_same_day_changes as ( + select + schedule_id, + schedule_id_index, + 
valid_from, + valid_until, + schedule_change, + -- for use in the next cte + lag(valid_until) over (partition by schedule_id, schedule_change order by valid_from, valid_until) as previous_valid_until + from find_same_day_changes + where row_number = 1 + -- we don't want the most current schedule since it would be captured by the live schedule. we want to use the live schedule in case we're not using histories. and valid_until is not null +), find_actual_changes as ( + -- sometimes an audit log record is generated but the schedule is actually unchanged. + -- accumulate group flags to create unique groupings for adjacent periods + select + schedule_id, + schedule_id_index, + valid_from, + valid_until, + schedule_change, + -- calculate if this row is adjacent to the previous row + sum(case when previous_valid_until = valid_from then 0 else 1 end) + over (partition by schedule_id, schedule_change order by valid_from) + as group_id + from consolidate_same_day_changes + +), consolidate_actual_changes as ( + -- consolidate the records by finding the min valid_from and max valid_until for each group + select + schedule_id, + group_id, + schedule_change, + max(schedule_id_index) as schedule_id_index, + min(valid_from) as valid_from, + max(valid_until) as valid_until + from find_actual_changes + {{ dbt_utils.group_by(3) }} + +-- now that the schedule changes are cleaned, we can split into the individual schedules periods ), split_days as ( {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} {% for day, day_number in days_of_week.items() %} select - consolidate_same_day_changes.*, + schedule_id, + schedule_id_index, + valid_from, + valid_until, + schedule_change, '{{ day }}' as day_of_week, cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number, {{ zendesk.regex_extract('schedule_change', "'.*?" 
~ day ~ ".*?({.*?})'") }} as day_of_week_schedule - from consolidate_same_day_changes + from consolidate_actual_changes {% if not loop.last %}union all{% endif %} {% endfor %} @@ -88,7 +142,6 @@ with audit_logs as ( {%- endif %} ), split_times as ( - select unnested_schedules.*, cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, @@ -98,15 +151,13 @@ with audit_logs as ( from unnested_schedules ), calculate_start_end_times as ( - select schedule_id, + schedule_id_index, start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time, end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time, valid_from, valid_until, - cast({{ dbt.date_trunc('day', 'valid_from') }} as {{ dbt.type_timestamp() }}) as valid_from_day, - cast({{ dbt.date_trunc('day', 'valid_until') }} as {{ dbt.type_timestamp() }}) as valid_until_day, day_of_week, day_of_week_number from split_times diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 25f0d969..14f5f50e 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -14,28 +14,19 @@ with calendar_spine as ( select * from {{ var('schedule') }} -), split_timezones as ( - select * - from {{ ref('int_zendesk__timezone_daylight') }} - ), schedule_timezones as ( select - schedule.schedule_id, - lower(schedule.time_zone) as time_zone, - coalesce(split_timezones.offset_minutes, 0) as offset_minutes, - schedule.start_time, - schedule.end_time, - schedule.schedule_name, - schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, - schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - -- we'll use these to determine which schedule version to associate tickets with. 
- cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from, - cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until, - cast({{ dbt_date.week_start('split_timezones.valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt_date.week_start('split_timezones.valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday - from schedule - left join split_timezones - on split_timezones.time_zone = lower(schedule.time_zone) + schedule_id, + time_zone, + schedule_name, + offset_minutes, + start_time_utc, + end_time_utc, + schedule_valid_from, + schedule_valid_until, + cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday + from {{ ref('int_zendesk__schedule_timezones') }} {% if var('using_holidays', True) %} ), schedule_holiday as ( diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql new file mode 100644 index 00000000..d6a9cc70 --- /dev/null +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -0,0 +1,195 @@ +{{ config(enabled=var('using_schedules', True)) }} + +with split_timezones as ( + select * + from {{ ref('int_zendesk__timezone_daylight') }} + +), schedule as ( + select + *, + max(created_at) over (partition by schedule_id order by created_at) as max_created_at + from {{ var('schedule') }} + +{% if var('using_schedule_histories', True) %} +), schedule_history as ( + select * + from {{ ref('int_zendesk__schedule_history') }} + +), schedule_id_timezone as ( + select + distinct schedule_id, + lower(time_zone) as time_zone, + schedule_name + from schedule + where created_at = max_created_at + +), schedule_history_timezones as ( + select + schedule_history.*, + lower(schedule_id_timezone.time_zone) as time_zone, + schedule_id_timezone.schedule_name + from schedule_history + left join schedule_id_timezone + on schedule_id_timezone.schedule_id = schedule_history.schedule_id + -- if there is not time_zone match, the schedule has been deleted + -- we have to filter these records out since time math requires timezone + -- revisit later if this becomes a bigger issue + where time_zone is not null +{% endif %} + +), union_schedule_histories as ( + select + schedule_id, + 0 as schedule_id_index, + created_at, + start_time, + end_time, + lower(time_zone) as time_zone, + schedule_name, + cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill the real value later + cast({{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until, + False as is_historical + from schedule + +{% if var('using_schedule_histories', True) %} + union all + + select + schedule_id, + schedule_id_index, + cast(null as {{ dbt.type_timestamp() }}) as created_at, + start_time, + end_time, + time_zone, + schedule_name, + cast(valid_from as date) as valid_from, + cast(valid_until as date) as valid_until, + True as is_historical + from schedule_history_timezones +{% endif %} + +), fill_current_schedule as ( + select + schedule_id, + schedule_id_index, + start_time, + end_time, + time_zone, + schedule_name, + coalesce(case + when not is_historical + -- get max valid_until from historical rows in the same schedule + 
then max(case when is_historical then valid_until end) + over (partition by schedule_id) + else valid_from + end, + cast(created_at as date)) + as schedule_valid_from, + valid_until as schedule_valid_until + from union_schedule_histories + +), lag_valid_until as ( + -- sometimes an audit log record is generated but the schedule is actually unchanged. + -- accumulate group flags to create unique groupings for adjacent periods + select + fill_current_schedule.*, + lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time + order by schedule_valid_from, schedule_valid_until) as previous_valid_until + from fill_current_schedule + +), find_actual_changes as ( + -- sometimes an audit log record is generated but the schedule is actually unchanged. + -- accumulate group flags to create unique groupings for adjacent periods + select + schedule_id, + schedule_id_index, + start_time, + end_time, + time_zone, + schedule_name, + schedule_valid_from, + schedule_valid_until, + -- calculate if this row is adjacent to the previous row + sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) + over (partition by schedule_id, start_time, end_time order by schedule_valid_from) + as group_id + from lag_valid_until + +), consolidate_changes as ( + -- consolidate the records by finding the min valid_from and max valid_until for each group + select + schedule_id, + start_time, + end_time, + time_zone, + schedule_name, + max(schedule_id_index) as schedule_id_index, + min(schedule_valid_from) as schedule_valid_from, + max(schedule_valid_until) as schedule_valid_until + from find_actual_changes + {{ dbt_utils.group_by(5) }} + +), schedule_timezones as ( + select + consolidate_changes.schedule_id, + consolidate_changes.schedule_id_index, + consolidate_changes.time_zone, + consolidate_changes.schedule_name, + coalesce(split_timezones.offset_minutes, 0) as offset_minutes, + consolidate_changes.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, + consolidate_changes.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, + cast(consolidate_changes.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from, + cast(consolidate_changes.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until, + -- we'll use these to determine which schedule version to associate tickets with. 
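-- As an aside: the lag_valid_until / find_actual_changes / consolidate_changes
-- sequence above is a standard gaps-and-islands consolidation. A self-contained
-- sketch with hypothetical rows:
--
--   with periods as (
--       select 1 as schedule_id, date '2024-01-01' as valid_from, date '2024-02-01' as valid_until
--       union all select 1, date '2024-02-01', date '2024-03-01'  -- adjacent -> same island
--       union all select 1, date '2024-06-01', date '2024-07-01'  -- gap -> new island
--   ), lagged as (
--       select periods.*,
--           lag(valid_until) over (partition by schedule_id order by valid_from) as previous_valid_until
--       from periods
--   ), grouped as (
--       select lagged.*,
--           sum(case when previous_valid_until = valid_from then 0 else 1 end)
--               over (partition by schedule_id order by valid_from
--                   rows between unbounded preceding and current row) as group_id
--       from lagged
--   )
--   select schedule_id, group_id, min(valid_from) as valid_from, max(valid_until) as valid_until
--   from grouped
--   group by 1, 2
--
--   -- returns one row per island: [2024-01-01, 2024-03-01) and [2024-06-01, 2024-07-01)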
+ cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until + {# , cast({{ dbt_date.week_start('split_timezones.valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as timezone_starting_sunday, + cast({{ dbt_date.week_start('split_timezones.valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as timezone_ending_sunday #} + from consolidate_changes + left join split_timezones + on split_timezones.time_zone = consolidate_changes.time_zone + +), filter_schedule_timezones as ( + select + schedule_timezones.*, + case when schedule_valid_until > timezone_valid_until + then true else false + end as is_timezone_spillover + from schedule_timezones + -- timezone that a schedule start falls within + where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until) + -- timezone that a schedule end falls within + or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until) + -- for schedules that span a long time, also find timezones that fall completely within the bounds of the schedule + or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until) + +), assemble_schedule_timezones as ( + select + schedule_id, + schedule_id_index, + time_zone, + schedule_name, + offset_minutes, + start_time_utc, + end_time_utc, + case + when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until + then schedule_valid_from + when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until + then timezone_valid_from + when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until + then timezone_valid_from + end as schedule_valid_from, + case + when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until + then schedule_valid_until + when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until + then timezone_valid_until + when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until + then timezone_valid_until + end as schedule_valid_until + from filter_schedule_timezones +) + +select * +from assemble_schedule_timezones \ No newline at end of file From 8a9d84dd0c16dbad871df869a9153271bbfeb18b Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Fri, 4 Oct 2024 09:27:24 -0700 Subject: [PATCH 53/76] get ready to merge into catherines branch --- .buildkite/scripts/run_models.sh | 10 ++++++---- integration_tests/dbt_project.yml | 4 ++-- models/history/int_zendesk__schedule_history.sql | 5 +++-- 3 files changed, 11 insertions(+), 8 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index 9b0d783a..e90265c6 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -17,9 +17,11 @@ echo `pwd` cd integration_tests dbt deps dbt seed --target "$db" --full-refresh -dbt run -m +int_zendesk__schedule_history --target "$db" --full-refresh -# dbt test --target "$db" -# dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh -# dbt test --target "$db" +dbt run 
--target "$db" --full-refresh +dbt run --target "$db" +dbt test --target "$db" +dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh +dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" +dbt test --target "$db" # dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 97baa339..8cd5429b 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -41,8 +41,8 @@ vars: # fivetran_consistency_ticket_metrics_exclusion_tickets: (11092,11093,11094) # fivetran_integrity_sla_count_match_tickets: (76) -# models: -# +schema: "zendesk_{{ var('directed_schema','dev') }}" +models: + +schema: "zendesk_{{ var('directed_schema','dev') }}" seeds: +quote_columns: "{{ true if target.type == 'redshift' else false }}" diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index d79c5e34..0f2389b1 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -12,6 +12,7 @@ with audit_logs as ( select schedule_id, created_at, + -- Clean up the change_description, sometimes has random html stuff in it replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description, 'workweek changed from', ''), '"', '"'), @@ -35,8 +36,8 @@ with audit_logs as ( split_to_from.* from split_to_from -- Filter out schedules with multiple changes in a day to keep the current one - -- where cast(valid_from as date) != cast(valid_until as date) - -- and valid_until is not null + where cast(valid_from as date) != cast(valid_until as date) -- may need to use date_trunc instead? + and valid_until is not null ), split_days as ( {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %} From b3c97abf7cf1c1a3881d75fc04aab7709fe078e3 Mon Sep 17 00:00:00 2001 From: Jamie Rodriguez <65564846+fivetran-jamie@users.noreply.github.com> Date: Fri, 4 Oct 2024 10:49:30 -0700 Subject: [PATCH 54/76] postgres revert --- macros/regex_extract.sql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/macros/regex_extract.sql b/macros/regex_extract.sql index cf3541ac..e367f72a 100644 --- a/macros/regex_extract.sql +++ b/macros/regex_extract.sql @@ -26,7 +26,7 @@ {% macro postgres__regex_extract(string, day) %} {% set regex = "'.*?" 
~ day ~ ".*?({.*?})'" %} - (regexp_matches({{ string }}, '{{ regex }}'))[1] + (regexp_matches({{ string }}, {{ regex }}))[1] {%- endmacro %} From eff6401268bbc928d023424fe2414761323fab12 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 14:08:35 -0500 Subject: [PATCH 55/76] add comments --- .../int_zendesk__schedule_spine.sql | 12 +- .../int_zendesk__schedule_timezones.sql | 109 ++++++++++++------ 2 files changed, 79 insertions(+), 42 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 14f5f50e..a5e1fa26 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -33,6 +33,7 @@ with calendar_spine as ( select * from {{ var('schedule_holiday') }} +-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start. ), schedule_holiday_ranges as ( select holiday_name, @@ -43,17 +44,17 @@ with calendar_spine as ( cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday from schedule_holiday +-- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. +-- This first step is to find those holidays. ), holiday_multiple_weeks_check as ( - -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. - -- This first step is to find those holidays. select schedule_holiday_ranges.*, -- calculate weeks the holiday range spans {{ dbt.datediff('holiday_valid_from', 'holiday_valid_until', 'week') }} + 1 as holiday_weeks_spanned from schedule_holiday_ranges +-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte. ), expanded_holidays as ( - -- this only needs to be run for holidays spanning multiple weeks select holiday_multiple_weeks_check.*, cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number @@ -63,6 +64,7 @@ with calendar_spine as ( where holiday_multiple_weeks_check.holiday_weeks_spanned > 1 and week_numbers.generated_number <= holiday_multiple_weeks_check.holiday_weeks_spanned +-- Define start and end times for each segment of a multi-week holiday. ), split_multiweek_holidays as ( -- Business as usual for holidays that fall within a single week. @@ -111,7 +113,8 @@ with calendar_spine as ( from expanded_holidays where holiday_weeks_spanned > 1 --- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules. +-- Explodes multi-week holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules +-- that occur during a holiday downstream. ), schedule_holiday_spine as ( select @@ -128,6 +131,7 @@ with calendar_spine as ( and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day {% endif %} +-- Joins in the holidays if using or casts nulls if not. 
), join_holidays as ( select schedule_timezones.schedule_id, diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql index d6a9cc70..ab88682c 100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -7,7 +7,7 @@ with split_timezones as ( ), schedule as ( select *, - max(created_at) over (partition by schedule_id order by created_at) as max_created_at + max(created_at) over (partition by schedule_id) as max_created_at from {{ var('schedule') }} {% if var('using_schedule_histories', True) %} @@ -15,6 +15,8 @@ with split_timezones as ( select * from {{ ref('int_zendesk__schedule_history') }} +-- Select the most recent timezone associated with each schedule based on +-- the max_created_at timestamp. Historical timezone changes are not yet tracked. ), schedule_id_timezone as ( select distinct schedule_id, @@ -23,6 +25,9 @@ with split_timezones as ( from schedule where created_at = max_created_at +-- Combine historical schedules with the most recent timezone data. Filter +-- out records where the timezone is missing, indicating the schedule has +-- been deleted. ), schedule_history_timezones as ( select schedule_history.*, @@ -31,12 +36,13 @@ with split_timezones as ( from schedule_history left join schedule_id_timezone on schedule_id_timezone.schedule_id = schedule_history.schedule_id - -- if there is not time_zone match, the schedule has been deleted - -- we have to filter these records out since time math requires timezone + -- We have to filter these records out since time math requires timezone -- revisit later if this becomes a bigger issue where time_zone is not null {% endif %} +-- Combine current schedules with historical schedules, marking if each +-- record is historical. Adjust the valid_from and valid_until dates accordingly. ), union_schedule_histories as ( select schedule_id, @@ -68,6 +74,8 @@ with split_timezones as ( from schedule_history_timezones {% endif %} +-- Set the schedule_valid_from for current schedules based on the most recent historical row. +-- This allows the current schedule to pick up where the historical schedule left off. ), fill_current_schedule as ( select schedule_id, @@ -88,18 +96,18 @@ with split_timezones as ( valid_until as schedule_valid_until from union_schedule_histories +-- Detect adjacent time periods by lagging the schedule_valid_until value +-- to identify effectively unchanged schedules. ), lag_valid_until as ( - -- sometimes an audit log record is generated but the schedule is actually unchanged. - -- accumulate group flags to create unique groupings for adjacent periods select fill_current_schedule.*, lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time order by schedule_valid_from, schedule_valid_until) as previous_valid_until from fill_current_schedule +-- Identify unique schedule groupings by detecting gaps between adjacent time +-- periods to group unchanged records for filtering later. ), find_actual_changes as ( - -- sometimes an audit log record is generated but the schedule is actually unchanged. 
- -- accumulate group flags to create unique groupings for adjacent periods select schedule_id, schedule_id_index, @@ -109,60 +117,67 @@ with split_timezones as ( schedule_name, schedule_valid_from, schedule_valid_until, - -- calculate if this row is adjacent to the previous row - sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) + sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row over (partition by schedule_id, start_time, end_time order by schedule_valid_from) as group_id from lag_valid_until +-- Consolidate records into continuous periods by finding the minimum +-- valid_from and maximum valid_until for each group of unchanged schedules. ), consolidate_changes as ( - -- consolidate the records by finding the min valid_from and max valid_until for each group select schedule_id, start_time, end_time, time_zone, schedule_name, - max(schedule_id_index) as schedule_id_index, + max(schedule_id_index) as schedule_id_index, --this is arbitrary, but helps with keeping groups together downstream. min(schedule_valid_from) as schedule_valid_from, max(schedule_valid_until) as schedule_valid_until from find_actual_changes {{ dbt_utils.group_by(5) }} +-- Reset the schedule_valid_from date for the "default schedule" to 1970-01-01 +-- for downstream models referencing this schedule. See int_zendesk__ticket_schedules. +), reset_schedule_start as ( + select + schedule_id, + schedule_id_index, + time_zone, + schedule_name, + start_time, + end_time, + -- this is for the 'default schedule' (see used in int_zendesk__ticket_schedules) + case + when schedule_valid_from = min(schedule_valid_from) over () then '1970-01-01' + else schedule_valid_from + end as schedule_valid_from, + schedule_valid_until + from consolidate_changes + +-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible +-- time_zone matches for each schedule. The erroneous timezones will be filtered next. ), schedule_timezones as ( select - consolidate_changes.schedule_id, - consolidate_changes.schedule_id_index, - consolidate_changes.time_zone, - consolidate_changes.schedule_name, + reset_schedule_start.schedule_id, + reset_schedule_start.schedule_id_index, + reset_schedule_start.time_zone, + reset_schedule_start.schedule_name, coalesce(split_timezones.offset_minutes, 0) as offset_minutes, - consolidate_changes.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, - consolidate_changes.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, - cast(consolidate_changes.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from, - cast(consolidate_changes.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until, + reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, + reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, + cast(reset_schedule_start.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from, + cast(reset_schedule_start.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until, -- we'll use these to determine which schedule version to associate tickets with. 
cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from, cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until - {# , cast({{ dbt_date.week_start('split_timezones.valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as timezone_starting_sunday, - cast({{ dbt_date.week_start('split_timezones.valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as timezone_ending_sunday #} - from consolidate_changes + from reset_schedule_start left join split_timezones - on split_timezones.time_zone = consolidate_changes.time_zone - -), filter_schedule_timezones as ( - select - schedule_timezones.*, - case when schedule_valid_until > timezone_valid_until - then true else false - end as is_timezone_spillover - from schedule_timezones - -- timezone that a schedule start falls within - where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until) - -- timezone that a schedule end falls within - or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until) - -- for schedules that span a long time, also find timezones that fall completely within the bounds of the schedule - or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until) + on split_timezones.time_zone = reset_schedule_start.time_zone +-- Assemble the final schedule-timezone relationship by determining the correct +-- schedule_valid_from and schedule_valid_until based on overlapping periods +-- between the schedule and timezone. ), assemble_schedule_timezones as ( select schedule_id, @@ -172,23 +187,41 @@ with split_timezones as ( offset_minutes, start_time_utc, end_time_utc, +-- Be very careful if changing the order of these case whens--it does matter! case + -- timezone that a schedule start falls within when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until then schedule_valid_from + -- timezone that a schedule end falls within when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until then timezone_valid_from + -- timezones that fall completely within the bounds of the schedule when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until then timezone_valid_from end as schedule_valid_from, case + -- timezone that a schedule end falls within when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until then schedule_valid_until + -- timezone that a schedule start falls within when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until then timezone_valid_until + -- timezones that fall completely within the bounds of the schedule when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until then timezone_valid_until end as schedule_valid_until - from filter_schedule_timezones + + from schedule_timezones + + -- Filter records based on whether the schedule periods overlap with timezone periods. Capture + -- when a schedule start or end falls within a time zone, and also capture timezones that exist + -- entirely within the bounds of a schedule. 
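-- The three overlap cases handled below, shown for a schedule valid
-- [2024-03-01, 2024-09-01) (dates illustrative):
--   1. schedule start inside a timezone period:    tz valid [2024-01-01, 2024-04-01)
--   2. schedule end inside a timezone period:      tz valid [2024-08-01, 2024-10-01)
--   3. timezone period wholly inside the schedule: tz valid [2024-04-01, 2024-08-01)
-- Together these keep every timezone window that intersects the schedule window.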
+ -- timezone that a schedule start falls within + where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until) + -- timezone that a schedule end falls within + or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until) + -- timezones that fall completely within the bounds of the schedule + or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until) ) select * From 763ca33d90328eccb1cb6639aef75136305bc4c2 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 14:38:02 -0500 Subject: [PATCH 56/76] redshift fixes --- models/history/int_zendesk__schedule_history.sql | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index a676f775..20bf4f45 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -71,8 +71,9 @@ with audit_logs as ( schedule_change, -- calculate if this row is adjacent to the previous row sum(case when previous_valid_until = valid_from then 0 else 1 end) - over (partition by schedule_id, schedule_change order by valid_from) - as group_id + over (partition by schedule_id, schedule_change + order by valid_from + rows between unbounded preceding and current row) -- Redshift needs this frame clause for aggregating from consolidate_same_day_changes ), consolidate_actual_changes as ( @@ -164,4 +165,4 @@ with audit_logs as ( ) select * -from calculate_start_end_times +from calculate_start_end_times \ No newline at end of file From 238bd9674d71a9a9ca3197dc5fa81c84e56693f8 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 15:01:35 -0500 Subject: [PATCH 57/76] redshift fixes --- models/history/int_zendesk__schedule_history.sql | 1 + 1 file changed, 1 insertion(+) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index 20bf4f45..66e7a56b 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -74,6 +74,7 @@ with audit_logs as ( over (partition by schedule_id, schedule_change order by valid_from rows between unbounded preceding and current row) -- Redshift needs this frame clause for aggregating + as group_id from consolidate_same_day_changes ), consolidate_actual_changes as ( From 4f572037ef8ea90a32005bbcf3baff4fe968d353 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 15:16:42 -0500 Subject: [PATCH 58/76] redshift fixes --- .../int_zendesk__schedule_timezones.sql | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql index ab88682c..4eb6d4bb 100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -30,7 +30,12 @@ with split_timezones as ( -- been deleted. 
), schedule_history_timezones as ( select - schedule_history.*, + schedule_history.schedule_id, + schedule_history.schedule_id_index, + schedule_history.start_time, + schedule_history.end_time, + schedule_history.valid_from, + schedule_history.valid_until, lower(schedule_id_timezone.time_zone) as time_zone, schedule_id_timezone.schedule_name from schedule_history @@ -118,8 +123,10 @@ with split_timezones as ( schedule_valid_from, schedule_valid_until, sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row - over (partition by schedule_id, start_time, end_time order by schedule_valid_from) - as group_id + over (partition by schedule_id, start_time, end_time + order by schedule_valid_from + rows between unbounded preceding and current row) + as group_id from lag_valid_until -- Consolidate records into continuous periods by finding the minimum From 5c720b5f4d8e53e2aac129a62cc4faf199781adc Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Fri, 4 Oct 2024 16:30:43 -0500 Subject: [PATCH 59/76] updates --- .../history/int_zendesk__schedule_history.sql | 2 ++ .../int_zendesk__schedule_spine.sql | 25 +++++++------------ 2 files changed, 11 insertions(+), 16 deletions(-) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index cf7a7201..7b622dd2 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -114,6 +114,7 @@ with audit_logs as ( -- Redshift requires another CTE for unnesting select schedule_id, + schedule_id_index, valid_from, valid_until, schedule_change, @@ -128,6 +129,7 @@ with audit_logs as ( ), unnested_schedules as ( select schedule_id, + schedule_id_index, valid_from, valid_until, schedule_change, diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index a5e1fa26..346b4bd2 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -41,28 +41,21 @@ with calendar_spine as ( cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday - from schedule_holiday - --- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. --- This first step is to find those holidays. -), holiday_multiple_weeks_check as ( - select - schedule_holiday_ranges.*, - -- calculate weeks the holiday range spans - {{ dbt.datediff('holiday_valid_from', 'holiday_valid_until', 'week') }} + 1 as holiday_weeks_spanned - from schedule_holiday_ranges + cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, + -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. First step is to find those holidays. 
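-- Worked example (dates illustrative, on warehouses where a week-grain datediff
-- counts week-boundary crossings): a holiday running 2024-12-25 through
-- 2025-01-02 crosses one Sunday boundary, so
--   datediff(week, date '2024-12-25', date '2025-01-02') + 1 = 2
-- and that holiday is expanded into two weekly records downstream.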
+ {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned + from schedule_holiday -- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte. ), expanded_holidays as ( select - holiday_multiple_weeks_check.*, + schedule_holiday_ranges.*, cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number - from holiday_multiple_weeks_check + from schedule_holiday_ranges -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers - where holiday_multiple_weeks_check.holiday_weeks_spanned > 1 - and week_numbers.generated_number <= holiday_multiple_weeks_check.holiday_weeks_spanned + where schedule_holiday_ranges.holiday_weeks_spanned > 1 + and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned -- Define start and end times for each segment of a multi-week holiday. ), split_multiweek_holidays as ( @@ -76,7 +69,7 @@ with calendar_spine as ( holiday_starting_sunday, holiday_ending_sunday, holiday_weeks_spanned - from holiday_multiple_weeks_check + from schedule_holiday_ranges where holiday_weeks_spanned = 1 union all From 6d8ff79598ebfcf4390f404ec2ed584b1d90c458 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Sat, 5 Oct 2024 22:27:59 -0500 Subject: [PATCH 60/76] validation update --- .buildkite/scripts/run_models.sh | 4 +- CHANGELOG.md | 5 ++ README.md | 4 +- dbt_project.yml | 3 +- integration_tests/dbt_project.yml | 4 +- .../history/int_zendesk__schedule_history.sql | 2 +- .../int_zendesk__schedule_spine.sql | 63 ++++++++++++------- .../int_zendesk__schedule_timezones.sql | 54 +++++++++++++--- models/utils/int_zendesk__calendar_spine.sql | 10 ++- 9 files changed, 101 insertions(+), 48 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index e90265c6..36b437d3 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -20,8 +20,8 @@ dbt seed --target "$db" --full-refresh dbt run --target "$db" --full-refresh dbt run --target "$db" dbt test --target "$db" -dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh -dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" +dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_schedule_histories: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh +dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_schedule_histories: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" dbt test --target "$db" # dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ce0ab3ed..9ecdbee6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,8 @@ +# dbt_zendesk v0.17.0 +[PR 
#171](https://github.com/fivetran/dbt_zendesk/pull/171) includes the following changes: + +- Update this + # dbt_zendesk v0.17.0 ## New model ([#161](https://github.com/fivetran/dbt_zendesk/pull/161)) diff --git a/README.md b/README.md index bd2c1d11..13554508 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ Include the following zendesk package version in your `packages.yml` file: ```yml packages: - package: fivetran/zendesk - version: [">=0.17.0", "<0.18.0"] + version: [">=0.18.0", "<0.19.0"] ``` > **Note**: Do not include the Zendesk Support source package. The Zendesk Support transform package already has a dependency on the source in its own `packages.yml` file. @@ -231,7 +231,7 @@ This dbt package is dependent on the following dbt packages. These dependencies ```yml packages: - package: fivetran/zendesk_source - version: [">=0.12.0", "<0.13.0"] + version: [">=0.13.0", "<0.14.0"] - package: fivetran/fivetran_utils version: [">=0.4.0", "<0.5.0"] diff --git a/dbt_project.yml b/dbt_project.yml index ebfa3deb..3c555090 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -1,6 +1,5 @@ name: 'zendesk' -version: '0.17.0' - +version: '0.18.0' config-version: 2 require-dbt-version: [">=1.3.0", "<2.0.0"] diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 8cd5429b..5097b507 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -1,14 +1,12 @@ config-version: 2 name: 'zendesk_integration_tests' -version: '0.17.0' +version: '0.18.0' profile: 'integration_tests' vars: zendesk_schema: zendesk_integration_tests_50 - using_schedule_histories: true - using_schedules: true zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" diff --git a/models/history/int_zendesk__schedule_history.sql b/models/history/int_zendesk__schedule_history.sql index 7b622dd2..cac7775e 100644 --- a/models/history/int_zendesk__schedule_history.sql +++ b/models/history/int_zendesk__schedule_history.sql @@ -1,4 +1,4 @@ -{{ config(enabled=var('using_schedules', true) and var('using_schedule_histories', true)) }} +{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }} with audit_logs as ( select diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 346b4bd2..c43770d0 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -24,6 +24,7 @@ with calendar_spine as ( end_time_utc, schedule_valid_from, schedule_valid_until, + change_type, cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday from {{ ref('int_zendesk__schedule_timezones') }} @@ -122,7 +123,6 @@ with calendar_spine as ( inner join calendar_spine on split_multiweek_holidays.holiday_valid_from <= calendar_spine.date_day and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day -{% endif %} -- Joins in the holidays if using or casts nulls if not. 
), join_holidays as ( @@ -137,31 +137,18 @@ with calendar_spine as ( schedule_timezones.schedule_valid_until, schedule_timezones.schedule_starting_sunday, schedule_timezones.schedule_ending_sunday, - - {% if var('using_holidays', True) %} + schedule_timezones.change_type, schedule_holiday_spine.holiday_date, schedule_holiday_spine.holiday_name, schedule_holiday_spine.holiday_valid_from, schedule_holiday_spine.holiday_valid_until, schedule_holiday_spine.holiday_starting_sunday, schedule_holiday_spine.holiday_ending_sunday - {% else %} - cast(null as {{ dbt.type_timestamp() }}) as holiday_date, - cast(null as {{ dbt.type_string() }}) as holiday_name, - cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_from, - cast(null as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast(null as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast(null as {{ dbt.type_timestamp() }}) as holiday_ending_sunday - {% endif %} - from schedule_timezones - - {% if var('using_holidays', True) %} left join schedule_holiday_spine on schedule_holiday_spine.schedule_id = schedule_timezones.schedule_id and schedule_holiday_spine.holiday_date >= schedule_timezones.schedule_valid_from and schedule_holiday_spine.holiday_date < schedule_timezones.schedule_valid_until - {% endif %} ), split_holidays as( -- create records for the first day of the holiday @@ -215,6 +202,7 @@ with calendar_spine as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, + change_type, holiday_name, holiday_date, holiday_valid_from, @@ -243,6 +231,7 @@ with calendar_spine as ( schedule_valid_until, schedule_starting_sunday, schedule_ending_sunday, + change_type, holiday_name, holiday_date, holiday_valid_from, @@ -285,11 +274,25 @@ with calendar_spine as ( ), holiday_weeks as( select - adjust_ranges.*, + schedule_id, + time_zone, + offset_minutes, + start_time_utc, + end_time_utc, + schedule_name, + valid_from, + valid_until, + holiday_name, + holiday_valid_from, + holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, + 'partition_end' as holiday_start_or_end, + valid_from_index, case when holiday_start_or_end = '1_holiday' - then true - else false - end as is_holiday_week + then 'holiday' + else change_type + end as change_type from adjust_ranges -- filter out irrelevant records where not (valid_from >= valid_until and holiday_date is not null) @@ -299,14 +302,14 @@ with calendar_spine as ( holiday_weeks.*, -- Calculate holiday_valid_from in minutes from week start - case when is_holiday_week + case when change_type = 'holiday' then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_from', 'minute') }} - offset_minutes) -- timezone adjustment else null end as holiday_valid_from_minutes_from_week_start, -- Calculate holiday_valid_until in minutes from week start - case when is_holiday_week + case when change_type = 'holiday' then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }} + 24 * 60 -- add 1 day to set the upper bound of the holiday - offset_minutes)-- timezone adjustment @@ -321,14 +324,14 @@ with calendar_spine as ( valid_until, start_time_utc, end_time_utc, + change_type, case when start_time_utc < holiday_valid_until_minutes_from_week_start and end_time_utc > holiday_valid_from_minutes_from_week_start - and is_holiday_week + and change_type = 'holiday' then holiday_name else cast(null as {{ dbt.type_string() }}) end as holiday_name, - is_holiday_week, count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, 
end_time_utc) as number_holidays_in_week from valid_minutes @@ -356,7 +359,7 @@ with calendar_spine as ( valid_until, start_time_utc, end_time_utc, - is_holiday_week + change_type from filter_holidays -- This filter ensures that for each schedule, the count of holidays in a week matches the number @@ -365,6 +368,18 @@ with calendar_spine as ( -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null. where number_holidays_in_week = number_records_for_schedule_start_end and holiday_name is null + +{% else %} +), final as( + select + schedule_id, + schedule_valid_from as valid_from, + schedule_valid_until as valid_until, + start_time_utc, + end_time_utc, + change_type + from schedule_timezones +{% endif %} ) select * diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql index 4eb6d4bb..b58ca639 100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -44,7 +44,7 @@ with split_timezones as ( -- We have to filter these records out since time math requires timezone -- revisit later if this becomes a bigger issue where time_zone is not null -{% endif %} +{# {% endif %} #} -- Combine current schedules with historical schedules, marking if each -- record is historical. Adjust the valid_from and valid_until dates accordingly. @@ -62,7 +62,7 @@ with split_timezones as ( False as is_historical from schedule -{% if var('using_schedule_histories', True) %} +{# {% if var('using_schedule_histories', True) %} #} union all select @@ -77,7 +77,7 @@ with split_timezones as ( cast(valid_until as date) as valid_until, True as is_historical from schedule_history_timezones -{% endif %} +{# {% endif %} #} -- Set the schedule_valid_from for current schedules based on the most recent historical row. -- This allows the current schedule to pick up where the historical schedule left off. @@ -144,8 +144,7 @@ with split_timezones as ( from find_actual_changes {{ dbt_utils.group_by(5) }} --- Reset the schedule_valid_from date for the "default schedule" to 1970-01-01 --- for downstream models referencing this schedule. See int_zendesk__ticket_schedules. +-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01. ), reset_schedule_start as ( select schedule_id, @@ -156,7 +155,7 @@ with split_timezones as ( end_time, -- this is for the 'default schedule' (see used in int_zendesk__ticket_schedules) case - when schedule_valid_from = min(schedule_valid_from) over () then '1970-01-01' + when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01' else schedule_valid_from end as schedule_valid_from, schedule_valid_until @@ -185,7 +184,7 @@ with split_timezones as ( -- Assemble the final schedule-timezone relationship by determining the correct -- schedule_valid_from and schedule_valid_until based on overlapping periods -- between the schedule and timezone. -), assemble_schedule_timezones as ( +), final_schedule as ( select schedule_id, schedule_id_index, @@ -194,6 +193,8 @@ with split_timezones as ( offset_minutes, start_time_utc, end_time_utc, + timezone_valid_from, + timezone_valid_until, -- Be very careful if changing the order of these case whens--it does matter! 
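-- Concretely, the branches below can overlap: a schedule valid
-- [2024-03-01, 2024-09-01) sitting wholly inside a timezone period valid
-- [2024-01-01, 2024-12-01) satisfies both the first branch (start falls within)
-- and the second (end falls within). Since SQL case expressions are
-- first-match-wins, the first branch correctly keeps
-- schedule_valid_from = 2024-03-01; reordering would wrongly widen the period
-- to the timezone's 2024-01-01. (Dates illustrative.)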
case -- timezone that a schedule start falls within @@ -229,7 +230,44 @@ with split_timezones as ( or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until) -- timezones that fall completely within the bounds of the schedule or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until) + +{% else %} + +), final_schedule as ( + select + schedule.schedule_id, + 0 as schedule_id_index, + lower(schedule.time_zone) as time_zone, + schedule.schedule_name, + coalesce(split_timezones.offset_minutes, 0) as offset_minutes, + schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc, + schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from, + cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until + from schedule + left join split_timezones + on split_timezones.time_zone = lower(schedule.time_zone) +{% endif %} + +), final as ( + select + schedule_id, + schedule_id_index, + time_zone, + schedule_name, + offset_minutes, + start_time_utc, + end_time_utc, + schedule_valid_from, + schedule_valid_until, + case when schedule_valid_from = timezone_valid_from + then 'timezone' + else 'schedule' + end as change_type + from final_schedule ) select * -from assemble_schedule_timezones \ No newline at end of file +from final \ No newline at end of file diff --git a/models/utils/int_zendesk__calendar_spine.sql b/models/utils/int_zendesk__calendar_spine.sql index 2e897d6b..1d52ebb2 100644 --- a/models/utils/int_zendesk__calendar_spine.sql +++ b/models/utils/int_zendesk__calendar_spine.sql @@ -15,15 +15,13 @@ with spine as ( - var('ticket_field_history_timeframe_years', 50), "current_date") }} {% endset -%} - {% else %} -- {% set first_date_adjust = "2016-01-01" %} - {%- set first_date_query%} - select cast({{ dbt.dateadd("month", -1, "current_date") }} as date) - {% endset -%} + {%- set first_date = dbt_utils.get_single_value(first_date_query) %} + + {% else %} + {%- set first_date = '2016-01-01' %} {% endif %} - {%- set first_date = dbt_utils.get_single_value(first_date_query) %} - {{ dbt_utils.date_spine( datepart = "day", From c100e511f649587a5685150a88dff6dfd16b7934 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Sat, 5 Oct 2024 22:59:42 -0500 Subject: [PATCH 61/76] split models --- .../tests/integrity/metrics_count_match.sql | 1 - .../int_zendesk__schedule_history.sql | 0 .../int_zendesk__schedule_holiday.sql | 113 +++++++++++++++ .../int_zendesk__schedule_spine.sql | 136 ++---------------- .../int_zendesk__schedule_timezones.sql | 2 + 5 files changed, 128 insertions(+), 124 deletions(-) rename models/{history => intermediate}/int_zendesk__schedule_history.sql (100%) create mode 100644 models/intermediate/int_zendesk__schedule_holiday.sql diff --git a/integration_tests/tests/integrity/metrics_count_match.sql b/integration_tests/tests/integrity/metrics_count_match.sql index 715176c4..7e5b260c 100644 --- a/integration_tests/tests/integrity/metrics_count_match.sql +++ 
b/integration_tests/tests/integrity/metrics_count_match.sql @@ -14,7 +14,6 @@ with stg_count as ( metric_count as ( select count(*) as metric_ticket_count - from source from {{ ref('zendesk__ticket_metrics') }} ) diff --git a/models/history/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql similarity index 100% rename from models/history/int_zendesk__schedule_history.sql rename to models/intermediate/int_zendesk__schedule_history.sql diff --git a/models/intermediate/int_zendesk__schedule_holiday.sql b/models/intermediate/int_zendesk__schedule_holiday.sql new file mode 100644 index 00000000..03dc43ee --- /dev/null +++ b/models/intermediate/int_zendesk__schedule_holiday.sql @@ -0,0 +1,113 @@ +{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_holidays'])) }} + +/* + The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. + End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) +*/ + +with calendar_spine as ( + select + cast(date_day as {{ dbt.type_timestamp() }}) as date_day + from {{ ref('int_zendesk__calendar_spine') }} + +), schedule as ( + select * + from {{ var('schedule') }} + +), schedule_holiday as ( + select * + from {{ var('schedule_holiday') }} + +-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start. +), schedule_holiday_ranges as ( + select + holiday_name, + schedule_id, + cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, + cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, + cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, + cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, + -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. First step is to find those holidays. + {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned + from schedule_holiday + +-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte. +), expanded_holidays as ( + select + schedule_holiday_ranges.*, + cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number + from schedule_holiday_ranges + -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks + cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers + where schedule_holiday_ranges.holiday_weeks_spanned > 1 + and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned + +-- Define start and end times for each segment of a multi-week holiday. +), split_multiweek_holidays as ( + + -- Business as usual for holidays that fall within a single week. 
+ select + holiday_name, + schedule_id, + holiday_valid_from, + holiday_valid_until, + holiday_starting_sunday, + holiday_ending_sunday, + holiday_weeks_spanned + from schedule_holiday_ranges + where holiday_weeks_spanned = 1 + + union all + + -- Split holidays by week that span multiple weeks. + select + holiday_name, + schedule_id, + case + when holiday_week_number = 1 -- first week in multiweek holiday + then holiday_valid_from + -- We have to use days in case warehouse does not truncate to Sunday. + else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) + end as holiday_valid_from, + case + when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday + then holiday_valid_until + -- We have to use days in case warehouse does not truncate to Sunday. + else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday + end as holiday_valid_until, + case + when holiday_week_number = 1 -- first week in multiweek holiday + then holiday_starting_sunday + -- We have to use days in case warehouse does not truncate to Sunday. + else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) + end as holiday_starting_sunday, + case + when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday + then holiday_ending_sunday + -- We have to use days in case warehouse does not truncate to Sunday. + else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) + end as holiday_ending_sunday, + holiday_weeks_spanned + from expanded_holidays + where holiday_weeks_spanned > 1 + +-- Explodes multi-week holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules +-- that occur during a holiday downstream. 
+), holiday_spine as ( + + select + split_multiweek_holidays.holiday_name, + split_multiweek_holidays.schedule_id, + split_multiweek_holidays.holiday_valid_from, + split_multiweek_holidays.holiday_valid_until, + split_multiweek_holidays.holiday_starting_sunday, + split_multiweek_holidays.holiday_ending_sunday, + calendar_spine.date_day as holiday_date + from split_multiweek_holidays + inner join calendar_spine + on split_multiweek_holidays.holiday_valid_from <= calendar_spine.date_day + and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day +) + +select * +from holiday_spine diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index c43770d0..bb8b22c0 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -5,124 +5,14 @@ End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) */ -with calendar_spine as ( - select - cast(date_day as {{ dbt.type_timestamp() }}) as date_day - from {{ ref('int_zendesk__calendar_spine') }} - -), schedule as ( +with schedule_timezones as ( select * - from {{ var('schedule') }} - -), schedule_timezones as ( - select - schedule_id, - time_zone, - schedule_name, - offset_minutes, - start_time_utc, - end_time_utc, - schedule_valid_from, - schedule_valid_until, - change_type, - cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, - cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday from {{ ref('int_zendesk__schedule_timezones') }} {% if var('using_holidays', True) %} -), schedule_holiday as ( +), schedule_holidays as ( select * - from {{ var('schedule_holiday') }} - --- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start. -), schedule_holiday_ranges as ( - select - holiday_name, - schedule_id, - cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from, - cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until, - cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday, - cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday, - -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. First step is to find those holidays. - {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned - from schedule_holiday - --- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte. 
-), expanded_holidays as ( - select - schedule_holiday_ranges.*, - cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number - from schedule_holiday_ranges - -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks - cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers - where schedule_holiday_ranges.holiday_weeks_spanned > 1 - and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned - --- Define start and end times for each segment of a multi-week holiday. -), split_multiweek_holidays as ( - - -- Business as usual for holidays that fall within a single week. - select - holiday_name, - schedule_id, - holiday_valid_from, - holiday_valid_until, - holiday_starting_sunday, - holiday_ending_sunday, - holiday_weeks_spanned - from schedule_holiday_ranges - where holiday_weeks_spanned = 1 - - union all - - -- Split holidays by week that span multiple weeks. - select - holiday_name, - schedule_id, - case - when holiday_week_number = 1 -- first week in multiweek holiday - then holiday_valid_from - -- We have to use days in case warehouse does not truncate to Sunday. - else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) - end as holiday_valid_from, - case - when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday - then holiday_valid_until - -- We have to use days in case warehouse does not truncate to Sunday. - else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday - end as holiday_valid_until, - case - when holiday_week_number = 1 -- first week in multiweek holiday - then holiday_starting_sunday - -- We have to use days in case warehouse does not truncate to Sunday. - else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) - end as holiday_starting_sunday, - case - when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday - then holiday_ending_sunday - -- We have to use days in case warehouse does not truncate to Sunday. - else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }}) - end as holiday_ending_sunday, - holiday_weeks_spanned - from expanded_holidays - where holiday_weeks_spanned > 1 - --- Explodes multi-week holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules --- that occur during a holiday downstream. -), schedule_holiday_spine as ( - - select - split_multiweek_holidays.holiday_name, - split_multiweek_holidays.schedule_id, - split_multiweek_holidays.holiday_valid_from, - split_multiweek_holidays.holiday_valid_until, - split_multiweek_holidays.holiday_starting_sunday, - split_multiweek_holidays.holiday_ending_sunday, - calendar_spine.date_day as holiday_date - from split_multiweek_holidays - inner join calendar_spine - on split_multiweek_holidays.holiday_valid_from <= calendar_spine.date_day - and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day + from {{ ref('int_zendesk__schedule_holiday') }} -- Joins in the holidays if using or casts nulls if not. 
), join_holidays as ( @@ -138,17 +28,17 @@ with calendar_spine as ( schedule_timezones.schedule_starting_sunday, schedule_timezones.schedule_ending_sunday, schedule_timezones.change_type, - schedule_holiday_spine.holiday_date, - schedule_holiday_spine.holiday_name, - schedule_holiday_spine.holiday_valid_from, - schedule_holiday_spine.holiday_valid_until, - schedule_holiday_spine.holiday_starting_sunday, - schedule_holiday_spine.holiday_ending_sunday + schedule_holidays.holiday_date, + schedule_holidays.holiday_name, + schedule_holidays.holiday_valid_from, + schedule_holidays.holiday_valid_until, + schedule_holidays.holiday_starting_sunday, + schedule_holidays.holiday_ending_sunday from schedule_timezones - left join schedule_holiday_spine - on schedule_holiday_spine.schedule_id = schedule_timezones.schedule_id - and schedule_holiday_spine.holiday_date >= schedule_timezones.schedule_valid_from - and schedule_holiday_spine.holiday_date < schedule_timezones.schedule_valid_until + left join schedule_holidays + on schedule_holidays.schedule_id = schedule_timezones.schedule_id + and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from + and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until ), split_holidays as( -- create records for the first day of the holiday diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql index b58ca639..fe55fc52 100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -262,6 +262,8 @@ with split_timezones as ( end_time_utc, schedule_valid_from, schedule_valid_until, + cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, + cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, case when schedule_valid_from = timezone_valid_from then 'timezone' else 'schedule' From 56bb9540c0f1fd9bdeadb6bfe8f5ba659ab9ebd7 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Mon, 7 Oct 2024 17:53:40 -0500 Subject: [PATCH 62/76] update ymls --- .quickstart/quickstart.yml | 5 ++++ dbt_project.yml | 4 ++++ macros/json_parse_nonscalar.sql | 41 --------------------------------- 3 files changed, 9 insertions(+), 41 deletions(-) delete mode 100644 macros/json_parse_nonscalar.sql diff --git a/.quickstart/quickstart.yml b/.quickstart/quickstart.yml index a2c8a71b..3c4f6947 100644 --- a/.quickstart/quickstart.yml +++ b/.quickstart/quickstart.yml @@ -5,6 +5,7 @@ dbt_versions: ">=1.3.0 <2.0.0" table_variables: using_schedules: + - audit_log - daylight_time - schedule_holiday - schedule @@ -17,6 +18,10 @@ table_variables: - ticket_form_history using_organization_tags: - organization_tag + using_schedule_histories: + - audit_log + using_holidays: + - schedule_holiday destination_configurations: databricks: diff --git a/dbt_project.yml b/dbt_project.yml index 3c555090..7b17cc2b 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -13,6 +13,10 @@ models: intermediate: +schema: zendesk_intermediate +materialized: table + int_zendesk__schedule_timezones: + +materialized: ephemeral + int_zendesk__schedule_holiday: + +materialized: ephemeral reply_times: +materialized: ephemeral resolution_times: diff --git a/macros/json_parse_nonscalar.sql b/macros/json_parse_nonscalar.sql deleted file mode 100644 index 00ffc007..00000000 --- 
a/macros/json_parse_nonscalar.sql
+++ /dev/null
@@ -1,41 +0,0 @@
-{% macro json_parse_nonscalar(string, string_path) -%}
-
-{{ adapter.dispatch('json_parse_nonscalar', 'zendesk') (string, string_path) }}
-
-{%- endmacro %}
-
-{% macro default__json_parse_nonscalar(string, string_path) %}
-
-    json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )
-
-{% endmacro %}
-
-{% macro redshift__json_parse_nonscalar(string, string_path) %}
-
-    json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )
-
-{% endmacro %}
-
-{% macro bigquery__json_parse_nonscalar(string, string_path) %}
-
-    json_extract({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')
-
-{% endmacro %}
-
-{% macro postgres__json_parse_nonscalar(string, string_path) %}
-
-    {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'
-
-{% endmacro %}
-
-{% macro snowflake__json_parse_nonscalar(string, string_path) %}
-
-    parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}
-
-{% endmacro %}
-
-{% macro spark__json_parse_nonscalar(string, string_path) %}
-
-    {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}
-
-{% endmacro %}
\ No newline at end of file

From c4929cf459cfbcc951a59fac580e00f1d166279f Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Tue, 8 Oct 2024 17:30:58 -0500
Subject: [PATCH 63/76] revise schedule groups

---
 .quickstart/quickstart.yml                    |  2 -
 .../int_zendesk__schedule_history.sql         |  2 +-
 .../int_zendesk__schedule_timezones.sql       | 57 ++++++++-----------
 .../int_zendesk__ticket_historical_status.sql |  2 +-
 .../int_zendesk__ticket_schedules.sql         |  2 +-
 ...esk__agent_work_time_filtered_statuses.sql |  2 +-
 .../int_zendesk__sla_policy_applied.sql       |  2 +-
 ..._requester_wait_time_filtered_statuses.sql |  2 +-
 .../int_zendesk__field_calendar_spine.sql     |  2 +-
 .../utils/int_zendesk__timezone_daylight.sql  |  6 +-
 models/zendesk__sla_policies.sql              |  4 +-
 models/zendesk__ticket_metrics.sql            |  8 +--
 12 files changed, 41 insertions(+), 50 deletions(-)

diff --git a/.quickstart/quickstart.yml b/.quickstart/quickstart.yml
index 3c4f6947..1260c604 100644
--- a/.quickstart/quickstart.yml
+++ b/.quickstart/quickstart.yml
@@ -5,9 +5,7 @@ dbt_versions: ">=1.3.0 <2.0.0"
 
 table_variables:
   using_schedules:
-    - audit_log
    - daylight_time
-    - schedule_holiday
    - schedule
    - time_zone
  using_domain_names:
diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql
index cac7775e..0a2409d5 100644
--- a/models/intermediate/int_zendesk__schedule_history.sql
+++ b/models/intermediate/int_zendesk__schedule_history.sql
@@ -43,7 +43,7 @@ with audit_logs as (
        row_number() over (
            partition by schedule_id, cast(valid_from as date)
            -- ordering to get the latest change when there are multiple on one day
-            order by valid_from desc, coalesce(valid_until, {{ dbt.current_timestamp_backcompat() }}) desc
+            order by valid_from desc, coalesce(valid_until, {{ dbt.current_timestamp() }}) desc
        ) as row_number
    from split_to_from

diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql
index fe55fc52..d24b0fea
100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -44,7 +44,6 @@ with split_timezones as ( -- We have to filter these records out since time math requires timezone -- revisit later if this becomes a bigger issue where time_zone is not null -{# {% endif %} #} -- Combine current schedules with historical schedules, marking if each -- record is historical. Adjust the valid_from and valid_until dates accordingly. @@ -58,11 +57,10 @@ with split_timezones as ( lower(time_zone) as time_zone, schedule_name, cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill the real value later - cast({{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until, + cast({{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until, False as is_historical from schedule -{# {% if var('using_schedule_histories', True) %} #} union all select @@ -77,7 +75,6 @@ with split_timezones as ( cast(valid_until as date) as valid_until, True as is_historical from schedule_history_timezones -{# {% endif %} #} -- Set the schedule_valid_from for current schedules based on the most recent historical row. -- This allows the current schedule to pick up where the historical schedule left off. @@ -110,39 +107,35 @@ with split_timezones as ( order by schedule_valid_from, schedule_valid_until) as previous_valid_until from fill_current_schedule --- Identify unique schedule groupings by detecting gaps between adjacent time --- periods to group unchanged records for filtering later. -), find_actual_changes as ( - select - schedule_id, - schedule_id_index, - start_time, +-- Identify unique schedule groupings +), assign_groups as ( + select distinct + schedule_id, + start_time, end_time, - time_zone, - schedule_name, - schedule_valid_from, - schedule_valid_until, - sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row - over (partition by schedule_id, start_time, end_time - order by schedule_valid_from - rows between unbounded preceding and current row) - as group_id - from lag_valid_until + row_number() over (partition by schedule_id order by start_time) as group_id + from fill_current_schedule + {{ dbt_utils.group_by(3) }} -- Consolidate records into continuous periods by finding the minimum -- valid_from and maximum valid_until for each group of unchanged schedules. ), consolidate_changes as ( select - schedule_id, - start_time, - end_time, - time_zone, - schedule_name, - max(schedule_id_index) as schedule_id_index, --this is arbitrary, but helps with keeping groups together downstream. - min(schedule_valid_from) as schedule_valid_from, - max(schedule_valid_until) as schedule_valid_until - from find_actual_changes - {{ dbt_utils.group_by(5) }} + fill_current_schedule.schedule_id, + fill_current_schedule.start_time, + fill_current_schedule.end_time, + fill_current_schedule.time_zone, + fill_current_schedule.schedule_name, + assign_groups.group_id, + min(fill_current_schedule.schedule_id_index) as schedule_id_index, --helps with keeping groups together downstream. 
+ min(fill_current_schedule.schedule_valid_from) as schedule_valid_from, + max(fill_current_schedule.schedule_valid_until) as schedule_valid_until + from fill_current_schedule + left join assign_groups + on assign_groups.schedule_id = fill_current_schedule.schedule_id + and assign_groups.start_time = fill_current_schedule.start_time + and assign_groups.end_time = fill_current_schedule.end_time + {{ dbt_utils.group_by(6) }} -- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01. ), reset_schedule_start as ( @@ -155,7 +148,7 @@ with split_timezones as ( end_time, -- this is for the 'default schedule' (see used in int_zendesk__ticket_schedules) case - when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01' + when schedule_valid_from = min(schedule_valid_from) over () then '1970-01-01' else schedule_valid_from end as schedule_valid_from, schedule_valid_until diff --git a/models/intermediate/int_zendesk__ticket_historical_status.sql b/models/intermediate/int_zendesk__ticket_historical_status.sql index 5d5911a9..9e7e39f0 100644 --- a/models/intermediate/int_zendesk__ticket_historical_status.sql +++ b/models/intermediate/int_zendesk__ticket_historical_status.sql @@ -15,7 +15,7 @@ with ticket_status_history as ( valid_ending_at, {{ dbt.datediff( 'valid_starting_at', - "coalesce(valid_ending_at, " ~ dbt.current_timestamp_backcompat() ~ ")", + "coalesce(valid_ending_at, " ~ dbt.current_timestamp() ~ ")", 'minute') }} as status_duration_calendar_minutes, value as status, -- MIGHT BE ABLE TO DELETE ROWS BELOW diff --git a/models/intermediate/int_zendesk__ticket_schedules.sql b/models/intermediate/int_zendesk__ticket_schedules.sql index db8e03f0..0ee37b3a 100644 --- a/models/intermediate/int_zendesk__ticket_schedules.sql +++ b/models/intermediate/int_zendesk__ticket_schedules.sql @@ -76,7 +76,7 @@ with ticket as ( schedule_id, schedule_created_at, coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at) - , {{ fivetran_utils.timestamp_add("hour", 1000, "" ~ dbt.current_timestamp_backcompat() ~ "") }} ) as schedule_invalidated_at + , {{ fivetran_utils.timestamp_add("hour", 1000, "" ~ dbt.current_timestamp() ~ "") }} ) as schedule_invalidated_at from schedule_events ) diff --git a/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql b/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql index 008f6878..13625256 100644 --- a/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql +++ b/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql @@ -17,7 +17,7 @@ with agent_work_time_sla as ( greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at, coalesce( ticket_historical_status.valid_ending_at, - {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp_backcompat() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past). + {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past). 
ticket_historical_status.status as ticket_status, agent_work_time_sla.sla_applied_at, agent_work_time_sla.target, diff --git a/models/sla_policy/int_zendesk__sla_policy_applied.sql b/models/sla_policy/int_zendesk__sla_policy_applied.sql index 2602e7e7..1b99fb0c 100644 --- a/models/sla_policy/int_zendesk__sla_policy_applied.sql +++ b/models/sla_policy/int_zendesk__sla_policy_applied.sql @@ -47,7 +47,7 @@ with ticket_field_history as ( left join sla_policy_name on sla_policy_name.ticket_id = sla_policy_applied.ticket_id and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at - and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp_backcompat() }}) + and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp() }}) where sla_policy_applied.latest_sla = 1 ) diff --git a/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql b/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql index 8ea160fb..7c8e81de 100644 --- a/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql +++ b/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql @@ -17,7 +17,7 @@ with requester_wait_time_sla as ( greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at, coalesce( ticket_historical_status.valid_ending_at, - {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp_backcompat() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past). + {{ fivetran_utils.timestamp_add('day', 30, "" ~ dbt.current_timestamp() ~ "") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past). 
ticket_historical_status.status as ticket_status, requester_wait_time_sla.sla_applied_at, requester_wait_time_sla.target, diff --git a/models/ticket_history/int_zendesk__field_calendar_spine.sql b/models/ticket_history/int_zendesk__field_calendar_spine.sql index 5e961604..045b92db 100644 --- a/models/ticket_history/int_zendesk__field_calendar_spine.sql +++ b/models/ticket_history/int_zendesk__field_calendar_spine.sql @@ -21,7 +21,7 @@ with calendar as ( select *, -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings - cast( {{ dbt.date_trunc('day', "case when status != 'closed' then " ~ dbt.current_timestamp_backcompat() ~ " else updated_at end") }} as date) as open_until + cast( {{ dbt.date_trunc('day', "case when status != 'closed' then " ~ dbt.current_timestamp() ~ " else updated_at end") }} as date) as open_until from {{ var('ticket') }} ), joined as ( diff --git a/models/utils/int_zendesk__timezone_daylight.sql b/models/utils/int_zendesk__timezone_daylight.sql index ee3c5a42..21d53a0e 100644 --- a/models/utils/int_zendesk__timezone_daylight.sql +++ b/models/utils/int_zendesk__timezone_daylight.sql @@ -48,7 +48,7 @@ with timezone as ( coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from, -- daylight_start_utc is null for timezones that don't use DT - coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until + coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date)) as valid_until from order_timezone_dt @@ -77,12 +77,12 @@ with timezone as ( max(daylight_end_utc) as valid_from, -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future. - cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until + cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until from order_timezone_dt group by 1, 2 -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979. 
- having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date) + having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp() }} as date) ), final as ( select diff --git a/models/zendesk__sla_policies.sql b/models/zendesk__sla_policies.sql index 13050979..f6a500a2 100644 --- a/models/zendesk__sla_policies.sql +++ b/models/zendesk__sla_policies.sql @@ -123,11 +123,11 @@ select in_business_hours, sla_breach_at, case when sla_elapsed_time is null - then ({{ dbt.datediff("sla_applied_at", dbt.current_timestamp_backcompat(), 'second') }} / 60) --This will create an entry for active sla's + then ({{ dbt.datediff("sla_applied_at", dbt.current_timestamp(), 'second') }} / 60) --This will create an entry for active sla's else sla_elapsed_time end as sla_elapsed_time, sla_breach_at > current_timestamp as is_active_sla, - case when (sla_breach_at > {{ dbt.current_timestamp_backcompat() }}) + case when (sla_breach_at > {{ dbt.current_timestamp() }}) then null else is_sla_breached end as is_sla_breach diff --git a/models/zendesk__ticket_metrics.sql b/models/zendesk__ticket_metrics.sql index c22fe52f..fb8dc9ff 100644 --- a/models/zendesk__ticket_metrics.sql +++ b/models/zendesk__ticket_metrics.sql @@ -104,16 +104,16 @@ select coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies, case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null - then ({{ dbt.datediff("ticket_enriched.requester_last_login_at", dbt.current_timestamp_backcompat(), 'second') }} /60) + then ({{ dbt.datediff("ticket_enriched.requester_last_login_at", dbt.current_timestamp(), 'second') }} /60) end as requester_last_login_age_minutes, case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null - then ({{ dbt.datediff("ticket_enriched.assignee_last_login_at", dbt.current_timestamp_backcompat(), 'second') }} /60) + then ({{ dbt.datediff("ticket_enriched.assignee_last_login_at", dbt.current_timestamp(), 'second') }} /60) end as assignee_last_login_age_minutes, case when lower(ticket_enriched.status) not in ('solved','closed') - then ({{ dbt.datediff("ticket_enriched.created_at", dbt.current_timestamp_backcompat(), 'second') }} /60) + then ({{ dbt.datediff("ticket_enriched.created_at", dbt.current_timestamp(), 'second') }} /60) end as unsolved_ticket_age_minutes, case when lower(ticket_enriched.status) not in ('solved','closed') - then ({{ dbt.datediff("ticket_enriched.updated_at", dbt.current_timestamp_backcompat(), 'second') }} /60) + then ({{ dbt.datediff("ticket_enriched.updated_at", dbt.current_timestamp(), 'second') }} /60) end as unsolved_ticket_age_since_update_minutes, case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution then true From 1d1b2e1bbea6d9c90f1f181ed851028586be26ae Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 01:04:08 -0500 Subject: [PATCH 64/76] updates --- dbt_project.yml | 8 +- .../int_zendesk__schedule_history.sql | 75 +++++++----------- .../int_zendesk__schedule_holiday.sql | 2 +- .../int_zendesk__schedule_spine.sql | 12 +-- .../int_zendesk__schedule_timezones.sql | 78 +++++++++++-------- 5 files changed, 84 insertions(+), 91 deletions(-) diff --git a/dbt_project.yml b/dbt_project.yml index 7b17cc2b..c086e9c6 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -13,10 +13,10 @@ models: intermediate: +schema: 
zendesk_intermediate +materialized: table - int_zendesk__schedule_timezones: - +materialized: ephemeral - int_zendesk__schedule_holiday: - +materialized: ephemeral + # int_zendesk__schedule_timezones: + # +materialized: ephemeral + # int_zendesk__schedule_holiday: + # +materialized: ephemeral reply_times: +materialized: ephemeral resolution_times: diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index 0a2409d5..8b531f4c 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -8,10 +8,11 @@ with audit_logs as ( from {{ var('audit_log') }} where lower(change_description) like '%workweek changed from%' +-- the formats for change_description vary, so it needs to be cleaned ), audit_logs_enhanced as ( select schedule_id, - row_number() over (partition by schedule_id order by created_at) as schedule_id_index, + rank() over (partition by schedule_id order by created_at desc) as schedule_id_index, created_at, -- Clean up the change_description, sometimes has random html stuff in it replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description, @@ -26,10 +27,10 @@ with audit_logs as ( select schedule_id, schedule_id_index, - created_at as valid_from, - lead(created_at) over ( - partition by schedule_id order by schedule_id_index) as valid_until, - -- we only need what the schedule was changed to + created_at, + cast(created_at as date) as valid_from, + -- each change_description has two parts: 1-from the old schedule 2-to the new schedule. + {{ dbt.split_part('change_description_cleaned', "' to '", 1) }} as schedule_change_from, {{ dbt.split_part('change_description_cleaned', "' to '", 2) }} as schedule_change from audit_logs_enhanced @@ -37,60 +38,33 @@ with audit_logs as ( select schedule_id, schedule_id_index, - cast(valid_from as date) as valid_from, - cast(valid_until as date) as valid_until, + created_at, + valid_from, + schedule_change_from, schedule_change, row_number() over ( - partition by schedule_id, cast(valid_from as date) + partition by schedule_id, valid_from -- valid from is type date -- ordering to get the latest change when there are multiple on one day - order by valid_from desc, coalesce(valid_until, {{ dbt.current_timestamp() }}) desc + order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty "from" schedules ) as row_number from split_to_from +-- multiple changes can occur on one day, so we will keep only the latest change in a day. ), consolidate_same_day_changes as ( select schedule_id, schedule_id_index, + created_at, valid_from, - valid_until, - schedule_change, - -- for use in the next cte - lag(valid_until) over (partition by schedule_id, schedule_change order by valid_from, valid_until) as previous_valid_until + lead(valid_from) over ( + partition by schedule_id order by schedule_id_index desc) as valid_until, + schedule_change from find_same_day_changes where row_number = 1 - -- we don't want the most current schedule since it would be captured by the live schedule. we want to use the live schedule in case we're not using histories. - and valid_until is not null - -), find_actual_changes as ( - -- sometimes an audit log record is generated but the schedule is actually unchanged. 
-    -- accumulate group flags to create unique groupings for adjacent periods
-    select
-        schedule_id,
-        schedule_id_index,
-        valid_from,
-        valid_until,
-        schedule_change,
-        -- calculate if this row is adjacent to the previous row
-        sum(case when previous_valid_until = valid_from then 0 else 1 end)
-            over (partition by schedule_id, schedule_change
-                order by valid_from
-                rows between unbounded preceding and current row) -- Redshift needs this frame clause for aggregating
-            as group_id
-    from consolidate_same_day_changes
-
-), consolidate_actual_changes as (
-    -- consolidate the records by finding the min valid_from and max valid_until for each group
-    select
-        schedule_id,
-        group_id,
-        schedule_change,
-        max(schedule_id_index) as schedule_id_index,
-        min(valid_from) as valid_from,
-        max(valid_until) as valid_until
-    from find_actual_changes
-    {{ dbt_utils.group_by(3) }}
 
--- now that the schedule changes are cleaned, we can split into the individual schedules periods
+-- Creates a record for each day of the week for each schedule_change event.
+-- This is done by iterating over the days of the week, extracting the corresponding
+-- schedule data for each day, and unioning the results after each iteration.
 ), split_days as (
    {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %}
    {% for day, day_number in days_of_week.items() %}
@@ -102,12 +76,17 @@ with audit_logs as (
        schedule_change,
        '{{ day }}' as day_of_week,
        cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number,
-        {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule
+        {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.
    from consolidate_same_day_changes
+    -- Exclude records with a null valid_until, which indicates it is the current schedule.
+    -- We will pull in the live schedule downstream, which is necessary when not using schedule histories.
+    where valid_until is not null
 
    {% if not loop.last %}union all{% endif %}
    {% endfor %}
 
+-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.
+-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.
{% if target.type == 'redshift' %}
-- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE
), redshift_parse_schedule as (
    select
        schedule_id,
        schedule_id_index,
        created_at,
        valid_from,
        valid_until,
        schedule_change,
        day_of_week,
        day_of_week_number,
        json_parse('[' || replace(replace(day_of_week_schedule, ', ', ','), ',', '},{') || ']') as json_schedule

    from split_days
-    where day_of_week_schedule != '{}'
+    where day_of_week_schedule != '{}' -- exclude when the day_of_week_schedule is empty.

), unnested_schedules as (
        select
@@ -173,6 +152,7 @@ with audit_logs as (

{% endif %}

+-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part.
), split_times as ( select unnested_schedules.*, @@ -182,6 +162,7 @@ with audit_logs as ( cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', "':'", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm from unnested_schedules +-- Calculate the start_time and end_time as minutes from Sunday ), calculate_start_end_times as ( select schedule_id, diff --git a/models/intermediate/int_zendesk__schedule_holiday.sql b/models/intermediate/int_zendesk__schedule_holiday.sql index 03dc43ee..72dbfc13 100644 --- a/models/intermediate/int_zendesk__schedule_holiday.sql +++ b/models/intermediate/int_zendesk__schedule_holiday.sql @@ -91,7 +91,7 @@ with calendar_spine as ( from expanded_holidays where holiday_weeks_spanned > 1 --- Explodes multi-week holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules +-- Explodes holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules -- that occur during a holiday downstream. ), holiday_spine as ( diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index bb8b22c0..2170a320 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -1,8 +1,9 @@ {{ config(enabled=var('using_schedules', True)) }} /* - The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. - End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) + This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, + accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays + and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`. */ with schedule_timezones as ( @@ -40,8 +41,8 @@ with schedule_timezones as ( and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until -), split_holidays as( - -- create records for the first day of the holiday +), split_holidays as ( + -- Creates a record that will be used for the time before a holiday select join_holidays.*, case @@ -53,7 +54,7 @@ with schedule_timezones as ( union all - -- create records for the last day of the holiday + -- Creates another record that will be used for the holiday itself select join_holidays.*, case @@ -78,6 +79,7 @@ with schedule_timezones as ( row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index, count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index from split_holidays + -- filter out records that have a holiday_date but aren't marked as a start or end. 
where not (holiday_date is not null and holiday_start_or_end is null) ), add_partition_end_row as( diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql index d24b0fea..b2b75470 100644 --- a/models/intermediate/int_zendesk__schedule_timezones.sql +++ b/models/intermediate/int_zendesk__schedule_timezones.sql @@ -45,19 +45,18 @@ with split_timezones as ( -- revisit later if this becomes a bigger issue where time_zone is not null --- Combine current schedules with historical schedules, marking if each --- record is historical. Adjust the valid_from and valid_until dates accordingly. +-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly. ), union_schedule_histories as ( select schedule_id, - 0 as schedule_id_index, + 0 as schedule_id_index, -- set the index as 0 for the current schedule created_at, start_time, end_time, lower(time_zone) as time_zone, schedule_name, - cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill the real value later - cast({{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until, + cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later + cast({{ dbt.current_timestamp() }} as date) as valid_until, False as is_historical from schedule @@ -87,9 +86,9 @@ with split_timezones as ( time_zone, schedule_name, coalesce(case - when not is_historical + when schedule_id_index = 0 -- get max valid_until from historical rows in the same schedule - then max(case when is_historical then valid_until end) + then max(case when schedule_id_index > 0 then valid_until end) over (partition by schedule_id) else valid_from end, @@ -107,37 +106,48 @@ with split_timezones as ( order by schedule_valid_from, schedule_valid_until) as previous_valid_until from fill_current_schedule --- Identify unique schedule groupings -), assign_groups as ( - select distinct - schedule_id, - start_time, +-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time. +-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, +-- we want to maintain the intermediate schedule change. +), find_actual_changes as ( + select + schedule_id, + schedule_id_index, + start_time, end_time, - row_number() over (partition by schedule_id order by start_time) as group_id - from fill_current_schedule - {{ dbt_utils.group_by(3) }} + time_zone, + schedule_name, + schedule_valid_from, + schedule_valid_until, + + -- The group_id increments only when there is a gap between the previous schedule's + -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent. + -- Adjacent schedules with the same start_time and end_time are grouped together, + -- while non-adjacent schedules are treated as separate groups. + sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row + over (partition by schedule_id, start_time, end_time + order by schedule_valid_from + rows between unbounded preceding and current row) + as group_id + from lag_valid_until -- Consolidate records into continuous periods by finding the minimum --- valid_from and maximum valid_until for each group of unchanged schedules. +-- valid_from and maximum valid_until for each group. 
), consolidate_changes as ( select - fill_current_schedule.schedule_id, - fill_current_schedule.start_time, - fill_current_schedule.end_time, - fill_current_schedule.time_zone, - fill_current_schedule.schedule_name, - assign_groups.group_id, - min(fill_current_schedule.schedule_id_index) as schedule_id_index, --helps with keeping groups together downstream. - min(fill_current_schedule.schedule_valid_from) as schedule_valid_from, - max(fill_current_schedule.schedule_valid_until) as schedule_valid_until - from fill_current_schedule - left join assign_groups - on assign_groups.schedule_id = fill_current_schedule.schedule_id - and assign_groups.start_time = fill_current_schedule.start_time - and assign_groups.end_time = fill_current_schedule.end_time + schedule_id, + start_time, + end_time, + time_zone, + schedule_name, + group_id, + min(schedule_id_index) as schedule_id_index, --helps with tracking downstream. + min(schedule_valid_from) as schedule_valid_from, + max(schedule_valid_until) as schedule_valid_until + from find_actual_changes {{ dbt_utils.group_by(6) }} --- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01. +-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage. ), reset_schedule_start as ( select schedule_id, @@ -146,9 +156,8 @@ with split_timezones as ( schedule_name, start_time, end_time, - -- this is for the 'default schedule' (see used in int_zendesk__ticket_schedules) case - when schedule_valid_from = min(schedule_valid_from) over () then '1970-01-01' + when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01' else schedule_valid_from end as schedule_valid_from, schedule_valid_until @@ -188,7 +197,7 @@ with split_timezones as ( end_time_utc, timezone_valid_from, timezone_valid_until, --- Be very careful if changing the order of these case whens--it does matter! + -- Be very careful if changing the order of these case whens--it does matter! case -- timezone that a schedule start falls within when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until @@ -255,6 +264,7 @@ with split_timezones as ( end_time_utc, schedule_valid_from, schedule_valid_until, + -- use dbt_date.week_start to ensure we truncate to Sunday cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday, cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday, case when schedule_valid_from = timezone_valid_from From 05fefd5339483486168b73d5f7ee64130365a6d3 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 10:16:22 -0500 Subject: [PATCH 65/76] add inline comments --- DECISIONLOG.md | 8 +-- .../int_zendesk__schedule_holiday.sql | 44 +++++++------ .../int_zendesk__schedule_spine.sql | 62 ++++++------------- .../int_zendesk__schedule_timezones.sql | 4 +- 4 files changed, 46 insertions(+), 72 deletions(-) diff --git a/DECISIONLOG.md b/DECISIONLOG.md index 78bb8133..7cb3c7d7 100644 --- a/DECISIONLOG.md +++ b/DECISIONLOG.md @@ -1,12 +1,10 @@ # Decision Log -## Tracking Ticket SLA Policies Into the Future -In our models we generate a future time series for ticket SLA policies. This is limited to a year to maintain performance. 
+## Multiple Schedule Changes in a Day -## No Historical Schedule Reference -At the current moment the Fivetran Zendesk Support connector does not contain historical data of schedules. This means if a schedule is created in the Zendesk Support UI and remains untouched for years, but then is adjusted in the current month you will see the data synced in the raw `schedule` table to reflect the current adjusted schedule. As a result the raw data will lose all historical reference of what this schedule range was previously. -Therefore, if you are leveraging the `using_schedule` variable as `true` to replicate business hour metrics this data model will only have a reference to the current range of any given schedule. This means tickets from the previous two years that were leveraging the __old__ schedule will not be reported as using the __new__ schedule. If this data limitation is a concern to you, we recommend opening a [Fivetran Support Feature Request](https://support.fivetran.com/hc/en-us/community/topics/360001909373-Feature-Requests?sort_by=votes) to enhance the Zendesk Support connector to include historical schedule data. +## Tracking Ticket SLA Policies Into the Future +In our models we generate a future time series for ticket SLA policies. This is limited to a year to maintain performance. ## Zendesk Support First Reply Time SLA Opinionated Logic The logic for `first_reply_time` breach/achievement metrics within the `zendesk__ticket_metrics` and `zendesk__sla_policies` models are structured on the Zendesk Support definition of [first reply time SLA events](https://support.zendesk.com/hc/en-us/articles/4408821871642-Understanding-ticket-reply-time?page=2#topic_jvw_nqd_1hb). For example, this data model calculates first reply time to be the duration of time (business or calendar) between the creation of the ticket and the first public comment from either an `agent` or `admin`. This holds true regardless of when the first reply time SLA was applied to the ticket. diff --git a/models/intermediate/int_zendesk__schedule_holiday.sql b/models/intermediate/int_zendesk__schedule_holiday.sql index 72dbfc13..bbc064f5 100644 --- a/models/intermediate/int_zendesk__schedule_holiday.sql +++ b/models/intermediate/int_zendesk__schedule_holiday.sql @@ -1,16 +1,13 @@ {{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_holidays'])) }} /* - The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings. - End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time) + The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may + change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream + to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time). */ -with calendar_spine as ( - select - cast(date_day as {{ dbt.type_timestamp() }}) as date_day - from {{ ref('int_zendesk__calendar_spine') }} -), schedule as ( +with schedule as ( select * from {{ var('schedule') }} @@ -59,7 +56,7 @@ with calendar_spine as ( union all - -- Split holidays by week that span multiple weeks. + -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks. 
select
            holiday_name,
            schedule_id,
            case
@@ -88,24 +85,24 @@ with calendar_spine as (
    from expanded_holidays
    where holiday_weeks_spanned > 1
 
--- Explodes holidays into individual days by joining with the calendar_spine. This is necessary to remove schedules
--- that occur during a holiday downstream.
-), holiday_spine as (
-
+-- Create a record for each holiday start and holiday end for each week to use downstream.
+), split_holidays as (
+    -- Creates a record that will be used for the time before a holiday
+    select
+        split_multiweek_holidays.*,
+        holiday_valid_from as holiday_date,
+        '0_gap' as holiday_start_or_end
+    from split_multiweek_holidays
+
+    union all
+
+    -- Creates another record that will be used for the holiday itself
    select
-        split_multiweek_holidays.holiday_name,
-        split_multiweek_holidays.schedule_id,
-        split_multiweek_holidays.holiday_valid_from,
-        split_multiweek_holidays.holiday_valid_until,
-        split_multiweek_holidays.holiday_starting_sunday,
-        split_multiweek_holidays.holiday_ending_sunday,
-        calendar_spine.date_day as holiday_date
-    from split_multiweek_holidays
-    inner join calendar_spine
-        on split_multiweek_holidays.holiday_valid_from <= calendar_spine.date_day
-        and split_multiweek_holidays.holiday_valid_until >= calendar_spine.date_day
+        split_multiweek_holidays.*,
+        holiday_valid_until as holiday_date,
+        '1_holiday' as holiday_start_or_end
+    from split_multiweek_holidays
)

select *
-from holiday_spine
+from split_holidays
diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql
index 2170a320..18854bbd 100644
--- a/models/intermediate/int_zendesk__schedule_spine.sql
+++ b/models/intermediate/int_zendesk__schedule_spine.sql
@@ -4,6 +4,10 @@
    This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time,
    accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays
    and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.
+
+    !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by
+    the `valid_until` field. In contrast, schedule history and timezone `valid_until` values mark the end of
+    validity at the start of the specified day.
 */
 
with schedule_timezones as (
@@ -19,7 +23,8 @@ with schedule_timezones as (
    select *
    from {{ ref('int_zendesk__schedule_holiday') }}
 
--- Joins in the holidays if using or casts nulls if not.
+-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.
+-- If there are no holidays, the columns are filled with null values.
), join_holidays as (
    select
        schedule_timezones.schedule_id,
        schedule_timezones.time_zone,
        schedule_timezones.offset_minutes,
        schedule_timezones.start_time_utc,
        schedule_timezones.end_time_utc,
        schedule_timezones.schedule_name,
        schedule_timezones.schedule_valid_from,
        schedule_timezones.schedule_valid_until,
        schedule_timezones.schedule_starting_sunday,
        schedule_timezones.schedule_ending_sunday,
        schedule_timezones.change_type,
        schedule_holidays.holiday_date,
        schedule_holidays.holiday_name,
        schedule_holidays.holiday_valid_from,
        schedule_holidays.holiday_valid_until,
        schedule_holidays.holiday_starting_sunday,
-        schedule_holidays.holiday_ending_sunday
+        schedule_holidays.holiday_ending_sunday,
+        schedule_holidays.holiday_start_or_end
    from schedule_timezones
    left join schedule_holidays
        on schedule_holidays.schedule_id = schedule_timezones.schedule_id
        and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from
        and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until

-), split_holidays as (
-    -- Creates a record that will be used for the time before a holiday
-    select
-        join_holidays.*,
-        case
-            when holiday_valid_from = holiday_date
-            then '0_gap' -- the number is for ordering later
-        end as holiday_start_or_end
-    from join_holidays
-    where holiday_date is not null
-
-    union all
-
-    -- Creates another record that will be used for the holiday itself
-    select
-        join_holidays.*,
-        case
-            when holiday_valid_until = holiday_date
-            then '1_holiday' -- the number is for ordering later
-        end as holiday_start_or_end
-    from join_holidays
-    where holiday_date is not null
-
-    union all
-
-    -- keep records for weeks with no holiday
-    select
-        join_holidays.*,
-        cast(null as {{ dbt.type_string() }}) as holiday_start_or_end
-    from join_holidays
-    where holiday_date is null
-
+-- Find and count all holidays that fall within a schedule range.
), valid_from_partition as(
    select
-        split_holidays.*,
+        join_holidays.*,
        row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,
        count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index
-    from split_holidays
-    -- filter out records that have a holiday_date but aren't marked as a start or end.
-    where not (holiday_date is not null and holiday_start_or_end is null)
+    from join_holidays

+-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.
), add_partition_end_row as(
    select
        schedule_id,
        valid_from,
        valid_until,
        start_time_utc,
        end_time_utc,
        change_type,
        holiday_name,
        holiday_date,
        holiday_valid_from,
        holiday_valid_until,
        holiday_starting_sunday,
        holiday_ending_sunday,
        holiday_start_or_end,
        valid_from_index,
        max_valid_from_index
    from valid_from_partition
    where max_valid_from_index > 1
-    and valid_from_index = max_valid_from_index
+    and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate

+-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or holiday.
), adjust_ranges as(
    select
        add_partition_end_row.*,
        holiday_valid_until,
        holiday_starting_sunday,
        holiday_ending_sunday,
-        'partition_end' as holiday_start_or_end,
+        holiday_start_or_end,
        valid_from_index,
        case when holiday_start_or_end = '1_holiday'
            then 'holiday'
            else change_type
        end as change_type
    from adjust_ranges
-    -- filter out irrelevant records
+    -- filter out irrelevant records after adjusting the ranges
    where not (valid_from >= valid_until and holiday_date is not null)

+-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.
), valid_minutes as(
    select
        holiday_weeks.*,
        end as holiday_valid_until_minutes_from_week_start
    from holiday_weeks

+-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.
), find_holidays as(

    select
        schedule_id,
@@ -227,6 +204,7 @@ with schedule_timezones as (
        count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week
    from valid_minutes

+-- Filter out records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.
), filter_holidays as(

    select *,
@@ -236,7 +214,7 @@ with schedule_timezones as (

    union all

-    -- we want to count the number of records for each schedule start_time_utc and end_time_utc for filtering later
+    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.
    select distinct *,
        cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name)

diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql
index b2b75470..5ada302e 100644
--- a/models/intermediate/int_zendesk__schedule_timezones.sql
+++ b/models/intermediate/int_zendesk__schedule_timezones.sql
@@ -233,8 +233,7 @@ with split_timezones as (
        -- timezones that fall completely within the bounds of the schedule
        or (timezone_valid_from >= schedule_valid_from
            and timezone_valid_until < schedule_valid_until)
-{% else %}
-
+{% else %} -- when not using schedule histories

), final_schedule as (

    select
        schedule.schedule_id,
@@ -267,6 +266,7 @@ with split_timezones as (
        -- use dbt_date.week_start to ensure we truncate to Sunday
        cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_starting_sunday,
        cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday,
+        -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.
        case when schedule_valid_from = timezone_valid_from
            then 'timezone'
            else 'schedule'

From d97f7140e1cf714448d66b67d061ad7c517e4d60 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 9 Oct 2024 11:10:43 -0500
Subject: [PATCH 66/76] update decision log

---
 DECISIONLOG.md | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/DECISIONLOG.md b/DECISIONLOG.md
index 7cb3c7d7..cdc4b4bf 100644
--- a/DECISIONLOG.md
+++ b/DECISIONLOG.md
@@ -1,7 +1,11 @@
 # Decision Log
-## Multiple Schedule Changes in a Day
+## Schedule History
+### Handling Multiple Schedule Changes in a Day
 While integrating schedule changes from the audit_log source, we observed that multiple changes can occur on the same day, often when users are still finalizing a schedule. To maintain clarity and align with our day-based downstream logic, we decided to capture only the last change made on any given day. If this approach proves insufficient for your use case, please submit a feature request to enable support for multiple changes within a single day.

+### Backfilling the Schedule History
+Although the schedule history extracted from the audit log includes the most recent schedule, we exclude it in the `int_zendesk__schedule_history` model. Instead, we rely on the schedule from `stg_zendesk__schedule`, since it represents the live schedule. This approach also allows users who are not using schedule histories to easily disable the history feature. We join the live schedule with the schedule history model and bridge the valid_from and valid_until dates to maintain consistency.
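+
+As a minimal sketch of the last-change-per-day rule described above (hypothetical relation and column names; the package's actual implementation may differ):
+```sql
+-- keep only the latest audit-log schedule change per schedule per calendar day
+with ranked_changes as (
+    select
+        source_id as schedule_id,
+        created_at,
+        change_description,
+        row_number() over (
+            partition by source_id, cast(created_at as date)
+            order by created_at desc
+        ) as daily_change_index
+    from audit_log_changes -- hypothetical relation name
+)
+
+select *
+from ranked_changes
+where daily_change_index = 1
+```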
 ## Tracking Ticket SLA Policies Into the Future
 In our models we generate a future time series for ticket SLA policies. This is limited to a year to maintain performance.

From 657331435df842e0042e4049d8da248078000b4f Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 9 Oct 2024 11:49:15 -0500
Subject: [PATCH 67/76] update changelog

---
 CHANGELOG.md | 30 +++++++++++++++++++++++++++---
 1 file changed, 27 insertions(+), 3 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9ecdbee6..e1125081 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,34 @@
-# dbt_zendesk v0.17.0
+# dbt_zendesk v0.18.0
 [PR #171](https://github.com/fivetran/dbt_zendesk/pull/171) includes the following changes:
-- Update this
+## Breaking Changes (Full refresh required after upgrading)
+### Schedule Change Support
+- Support for schedule changes has been added:
+  - Schedule changes are now extracted directly from the audit log, providing a view of schedule modifications over time.
+  - This feature is enabled by default, but can be easily turned off by setting `using_schedule_histories` to `false` in `dbt_project.yml`.
+  - The `int_zendesk__schedule_spine` model is now enhanced to incorporate these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data.
+  - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis, allowing for more accurate reporting.
+
+## New Features
+- Holiday support: users can now choose to disable holiday tracking by setting `using_holidays` to `false` in `dbt_project.yml`.
+- New intermediate models have been introduced to improve both the readability and the maintainability of the package:
+  - `int_zendesk__timezone_daylight`: A utility model that maintains a record of daylight savings adjustments for each time zone.
+  - `int_zendesk__schedule_history`: Captures a full history of schedule changes for each `schedule_id`.
+  - `int_zendesk__schedule_timezones`: Merges schedule history with time zone shifts.
+  - `int_zendesk__schedule_holidays`: Identifies and calculates holiday periods for each schedule.
+- Rebuilt logic in `int_zendesk__schedule_spine` to consolidate updates from the new intermediate models.

 ## Bug Fixes
+- Resolved a bug in the `int_zendesk__schedule_spine` model where users experienced large gaps in non-holiday periods. The updated logic addresses this issue.
+
+## Under the Hood Improvements
+- Replaced instances of `dbt.date_trunc` with `dbt_date.week_start` to standardize week start dates to Sunday across all warehouses, since our schedule logic relies on consistent weeks.
+- Replaced the deprecated `dbt.current_timestamp_backcompat()` function with `dbt.current_timestamp()` to ensure all timestamps are captured in UTC.
+- Added seed data for `audit_log` to enhance integration testing capabilities.
+- Introduced new helper macros, `clean_data` and `regex_extract`, to process the complex schedule-change text extracted from audit logs.
+- Updated `int_zendesk__calendar_spine` logic to prevent errors during compilation before the first full run, ensuring a smoother development experience.

 # dbt_zendesk v0.17.0
 ## New model ([#161](https://github.com/fivetran/dbt_zendesk/pull/161))
 - Addition of the `zendesk__document` model, designed to structure Zendesk textual data for vectorization and integration into NLP workflows.
The model outputs a table with: - `document_id`: Corresponding to the `ticket_id` From 0b0a05d56155dc1781917c801fd17b26c1636787 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 11:54:27 -0500 Subject: [PATCH 68/76] regen docs --- docs/catalog.json | 2 +- docs/manifest.json | 2 +- docs/run_results.json | 1 - integration_tests/dbt_project.yml | 7 +++---- 4 files changed, 5 insertions(+), 7 deletions(-) delete mode 100644 docs/run_results.json diff --git a/docs/catalog.json b/docs/catalog.json index e10aa459..2f77cb11 100644 --- a/docs/catalog.json +++ b/docs/catalog.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.8.3", "generated_at": "2024-09-03T18:15:40.230204Z", "invocation_id": "ab89e8de-0760-4824-96db-0e8bd67c9f64", "env": {}}, "nodes": {"seed.zendesk_integration_tests.brand_data_postgres": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, 
"name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, "seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": 
"integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": 
"start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": 
"subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without 
time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": 
"boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, 
"columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": 
null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "bigint", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "timestamp without time zone", "index": 2, "name": "last_comment_added_at", 
"comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "bigint", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, "model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "text", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"valid_from": {"type": "date", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "text", 
"index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "bigint", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "text", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "text", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": 
{"type": "timestamp without time zone", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "boolean", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "numeric", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "timestamp without time zone", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "integer", "index": 15, "name": "week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": 
null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "integer", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "numeric", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "timestamp without time zone", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "timestamp without time zone", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "bigint", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "timestamp without time zone", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "timestamp without time zone", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "numeric", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "double precision", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "timestamp with time zone", "index": 20, "name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "timestamp without time zone", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "boolean", "index": 22, "name": "is_stale_sla_policy", "comment": null}, "is_sla_breached": {"type": "boolean", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "double precision", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "timestamp without time zone", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "bigint", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, 
"ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "timestamp without time zone", "index": 2, "name": "valid_from", "comment": null}, "valid_until": {"type": "timestamp without time zone", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "bigint", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 5, "name": "end_time_utc", "comment": null}, "is_holiday_week": {"type": "boolean", "index": 6, "name": "is_holiday_week", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": 
"Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 
20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_comment_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "comment_time": {"type": "timestamp without time zone", "index": 3, "name": "comment_time", "comment": null}, "comment_markdown": {"type": "text", "index": 4, "name": "comment_markdown", "comment": null}, "comment_tokens": {"type": "integer", "index": 5, "name": "comment_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_document"}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "comments_group_markdown": {"type": "text", "index": 3, "name": "comments_group_markdown", "comment": null}, "chunk_tokens": {"type": "bigint", "index": 4, "name": "chunk_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped"}, "model.zendesk.int_zendesk__ticket_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_markdown": 
{"type": "text", "index": 2, "name": "ticket_markdown", "comment": null}, "ticket_tokens": {"type": "integer", "index": 3, "name": "ticket_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_document"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "text", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 4, "name": "last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "text", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 6, "name": "assignee_stations_count", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "bigint", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "text", "index": 2, "name": "latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "text", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "text", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "text", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "bigint", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": 
null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "double precision", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "text", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "bigint", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": "bigint", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "timestamp without time zone", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "timestamp without time zone", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "text", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "boolean", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": 
"Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}, "user_tags": {"type": "text", "index": 18, "name": "user_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"brand_id": {"type": "bigint", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "text", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "text", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 4, "name": "subdomain", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", 
"index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"daylight_end_utc": {"type": "timestamp without time zone", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "text", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "integer", "index": 6, "name": "daylight_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": 
{"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "text", "index": 2, "name": "domain_name", "comment": null}, "index": {"type": "integer", "index": 3, "name": "index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"group_id": {"type": "bigint", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 2, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, "model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", 
"index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, "model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", 
"comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "bigint", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "text", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"_fivetran_deleted": {"type": "boolean", "index": 1, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "timestamp without time zone", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "text", "index": 4, "name": "holiday_id", "comment": null}, "holiday_name": {"type": "text", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "timestamp without time zone", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 
5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", 
"comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "body": {"type": "text", "index": 4, "name": "body", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "is_public": {"type": "boolean", "index": 6, "name": "is_public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "user_id": {"type": "bigint", "index": 8, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "boolean", "index": 9, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "boolean", "index": 10, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "boolean", "index": 11, "name": "is_voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, 
"name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "text", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "bigint", "index": 6, "name": "user_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has 
Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "text", "index": 3, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": 
{"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": 
"via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"standard_offset": {"type": "text", "index": 1, "name": "standard_offset", "comment": null}, "time_zone": {"type": "text", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "integer", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, "model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp 
without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": 
null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"document_id": {"type": "text", "index": 1, "name": "document_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "chunk_tokens_approximate": {"type": "bigint", "index": 3, "name": "chunk_tokens_approximate", "comment": null}, "chunk": {"type": "text", "index": 4, "name": "chunk", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__document"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"sla_event_id": {"type": "text", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 4, "name": "metric", "comment": null}, 
"sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "boolean", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "boolean", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "text", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "text", "index": 5, "name": "assignee_name", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", 
"comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": 
"requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": 
null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_day_id": {"type": "text", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, 
"source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": 
null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "double precision", "index": 84, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "double precision", "index": 85, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 86, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 87, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 88, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", 
"index": 89, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 90, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 91, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 92, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "timestamp without time zone", "index": 93, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 94, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 95, "name": "assignee_stations_count", "comment": null}, "group_stations_count": {"type": "bigint", "index": 96, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "text", "index": 97, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "text", "index": 98, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 99, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 100, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "timestamp without time zone", "index": 101, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "timestamp without time zone", "index": 102, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 103, "name": "first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 104, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 105, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}, "first_resolution_calendar_minutes": {"type": "double precision", "index": 106, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "double precision", "index": 107, "name": "final_resolution_calendar_minutes", "comment": null}, "count_resolutions": {"type": "bigint", "index": 108, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "bigint", "index": 109, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "bigint", "index": 110, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "bigint", "index": 111, "name": "total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "timestamp without time zone", "index": 112, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "double precision", "index": 113, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "double precision", "index": 114, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "double precision", "index": 115, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "double precision", "index": 116, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": 
"double precision", "index": 117, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "double precision", "index": 118, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "double precision", "index": 119, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "bigint", "index": 120, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "double precision", "index": 121, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "double precision", "index": 122, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "double precision", "index": 123, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "double precision", "index": 124, "name": "unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 125, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 126, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "boolean", "index": 127, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "numeric", "index": 128, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "numeric", "index": 129, "name": "full_resolution_business_minutes", "comment": null}, "first_reply_time_business_minutes": {"type": "numeric", "index": 130, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "numeric", "index": 131, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "numeric", "index": 132, "name": "requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "numeric", "index": 133, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "numeric", "index": 134, "name": "agent_work_time_in_business_minutes", "comment": null}, "on_hold_time_in_business_minutes": {"type": "numeric", "index": 135, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "numeric", "index": 136, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "numeric", "index": 137, "name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_count": {"type": "bigint", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "bigint", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "bigint", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "bigint", 
"index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "bigint", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "bigint", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "bigint", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "bigint", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "bigint", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "bigint", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "bigint", "index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "bigint", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "bigint", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "bigint", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "bigint", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "bigint", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "bigint", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "bigint", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": {"type": "bigint", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "bigint", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "bigint", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, 
"logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without 
time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": 
"time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, "source.zendesk_source.zendesk.ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": 
"description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": 
"bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp 
without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates 
whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, "errors": null} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T16:53:17.756232Z", "invocation_id": "ee1cfc0d-443e-4374-ad8a-25dc360a3746", "env": {}}, "nodes": {"seed.zendesk_integration_tests.audit_log_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.audit_log_data"}, "seed.zendesk_integration_tests.brand_data_postgres": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", 
"index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": 
"timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, "seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", 
"comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": 
"assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", 
"index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, 
"updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": 
false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": 
false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "bigint", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "timestamp without time zone", "index": 2, "name": "last_comment_added_at", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "bigint", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, 
"model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "text", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"valid_from": {"type": "date", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "bigint", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time 
zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "text", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "text", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "boolean", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "numeric", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "timestamp without time zone", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "integer", "index": 15, "name": "week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": 
"text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "integer", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "numeric", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "timestamp without time zone", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "timestamp without time zone", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "bigint", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "timestamp without time zone", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "timestamp without time zone", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "numeric", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "double precision", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "timestamp with time zone", "index": 20, "name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "timestamp without time zone", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "boolean", "index": 22, "name": "is_stale_sla_policy", "comment": null}, 
"is_sla_breached": {"type": "boolean", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "double precision", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "timestamp without time zone", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "bigint", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", 
"comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", 
"index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "schedule_id_index": {"type": "bigint", "index": 2, "name": "schedule_id_index", "comment": null}, "start_time": {"type": "integer", "index": 3, "name": "start_time", "comment": null}, "end_time": {"type": "integer", "index": 4, "name": "end_time", "comment": null}, "valid_from": {"type": "date", "index": 5, "name": "valid_from", "comment": null}, "valid_until": {"type": "date", "index": 6, "name": "valid_until", "comment": null}, "day_of_week": {"type": "text", "index": 7, "name": "day_of_week", "comment": null}, "day_of_week_number": {"type": "integer", "index": 8, "name": "day_of_week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_history"}, "model.zendesk.int_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"holiday_name": {"type": "text", "index": 1, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "holiday_valid_from": {"type": "timestamp without time zone", "index": 3, "name": "holiday_valid_from", "comment": null}, "holiday_valid_until": {"type": "timestamp without time zone", "index": 4, "name": "holiday_valid_until", "comment": null}, "holiday_starting_sunday": {"type": "timestamp without time zone", "index": 5, "name": "holiday_starting_sunday", "comment": null}, "holiday_ending_sunday": {"type": "timestamp without time zone", "index": 6, "name": "holiday_ending_sunday", "comment": null}, "holiday_weeks_spanned": {"type": "integer", "index": 7, "name": "holiday_weeks_spanned", "comment": null}, "holiday_date": {"type": "timestamp without time zone", "index": 8, "name": "holiday_date", "comment": null}, "holiday_start_or_end": {"type": "text", "index": 9, "name": "holiday_start_or_end", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_holiday"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "timestamp without time zone", "index": 
2, "name": "valid_from", "comment": null}, "valid_until": {"type": "timestamp without time zone", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "bigint", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 5, "name": "end_time_utc", "comment": null}, "change_type": {"type": "text", "index": 6, "name": "change_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__schedule_timezones": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_timezones", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "schedule_id_index": {"type": "bigint", "index": 2, "name": "schedule_id_index", "comment": null}, "time_zone": {"type": "text", "index": 3, "name": "time_zone", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "offset_minutes": {"type": "integer", "index": 5, "name": "offset_minutes", "comment": null}, "start_time_utc": {"type": "bigint", "index": 6, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 7, "name": "end_time_utc", "comment": null}, "schedule_valid_from": {"type": "timestamp without time zone", "index": 8, "name": "schedule_valid_from", "comment": null}, "schedule_valid_until": {"type": "timestamp without time zone", "index": 9, "name": "schedule_valid_until", "comment": null}, "schedule_starting_sunday": {"type": "timestamp without time zone", "index": 10, "name": "schedule_starting_sunday", "comment": null}, "schedule_ending_sunday": {"type": "timestamp without time zone", "index": 11, "name": "schedule_ending_sunday", "comment": null}, "change_type": {"type": "text", "index": 12, "name": "change_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_timezones"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", 
"comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_comment_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "comment_time": {"type": "timestamp without time zone", "index": 3, "name": "comment_time", "comment": null}, "comment_markdown": {"type": "text", "index": 4, "name": "comment_markdown", "comment": null}, "comment_tokens": {"type": "integer", "index": 5, "name": "comment_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_document"}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "comments_group_markdown": {"type": "text", "index": 3, "name": "comments_group_markdown", "comment": null}, "chunk_tokens": {"type": "bigint", "index": 4, "name": "chunk_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped"}, "model.zendesk.int_zendesk__ticket_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_markdown": {"type": "text", "index": 2, "name": "ticket_markdown", "comment": null}, "ticket_tokens": {"type": "integer", "index": 3, "name": "ticket_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_document"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "text", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 4, "name": "last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "text", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 6, "name": "assignee_stations_count", "comment": 
null}, "unique_assignee_count": {"type": "bigint", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "bigint", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "text", "index": 2, "name": "latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "text", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "text", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "text", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "bigint", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "double precision", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "text", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "bigint", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": 
"bigint", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "timestamp without time zone", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "timestamp with time zone", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "text", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "boolean", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": 
"organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}, "user_tags": {"type": "text", "index": 18, "name": "user_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"audit_log_id": {"type": "text", "index": 1, "name": "audit_log_id", "comment": null}, "action": {"type": "text", "index": 2, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 3, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 4, "name": "change_description", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 6, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 7, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 8, "name": "source_type", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 9, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log"}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "BASE TABLE", 
"schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"brand_id": {"type": "bigint", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "text", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "text", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 4, "name": "subdomain", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"daylight_end_utc": {"type": "timestamp without time zone", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": 
"integer", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "text", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "integer", "index": 6, "name": "daylight_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "text", "index": 2, "name": "domain_name", "comment": null}, "index": {"type": "integer", "index": 3, "name": "index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "database": "postgres", "comment": null, "owner": 
"pguser"}, "columns": {"group_id": {"type": "bigint", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 2, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, "model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": 
"tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, "model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "bigint", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "text", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"_fivetran_deleted": {"type": "boolean", "index": 1, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "timestamp without time zone", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "text", "index": 4, "name": 
"holiday_id", "comment": null}, "holiday_name": {"type": "text", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "timestamp without time zone", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, 
"assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "body": {"type": "text", "index": 4, "name": "body", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "is_public": {"type": "boolean", "index": 6, "name": "is_public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "user_id": {"type": "bigint", "index": 8, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "boolean", 
"index": 9, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "boolean", "index": 10, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "boolean", "index": 11, "name": "is_voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "text", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "bigint", "index": 6, "name": "user_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 
5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "text", "index": 3, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": 
{"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, 
"name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"standard_offset": {"type": "text", "index": 1, "name": "standard_offset", "comment": null}, "time_zone": {"type": "text", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "integer", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": 
"_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, "model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, 
"description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", 
"database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"document_id": {"type": "text", "index": 1, "name": "document_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "chunk_tokens_approximate": {"type": "bigint", "index": 3, "name": "chunk_tokens_approximate", "comment": null}, "chunk": {"type": "text", "index": 4, "name": "chunk", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__document"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"sla_event_id": {"type": "text", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 4, "name": "metric", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "boolean", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "boolean", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "text", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "text", "index": 5, "name": "assignee_name", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", 
"index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": 
"is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, 
"assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_day_id": {"type": "text", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 
10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": 
null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": 
{"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "double precision", "index": 84, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "double precision", "index": 85, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 86, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 87, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 88, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 89, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 90, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 91, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 92, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "timestamp without time zone", "index": 93, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 94, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 95, "name": "assignee_stations_count", "comment": null}, "group_stations_count": {"type": "bigint", "index": 96, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "text", "index": 97, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "text", "index": 98, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 99, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 100, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "timestamp without time zone", "index": 101, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "timestamp without time zone", "index": 102, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 103, "name": "first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 104, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 105, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}, "first_resolution_calendar_minutes": {"type": "double precision", "index": 106, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "double precision", "index": 107, "name": "final_resolution_calendar_minutes", "comment": 
null}, "count_resolutions": {"type": "bigint", "index": 108, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "bigint", "index": 109, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "bigint", "index": 110, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "bigint", "index": 111, "name": "total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "timestamp without time zone", "index": 112, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "double precision", "index": 113, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "double precision", "index": 114, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "double precision", "index": 115, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "double precision", "index": 116, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": "double precision", "index": 117, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "double precision", "index": 118, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "double precision", "index": 119, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "bigint", "index": 120, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "double precision", "index": 121, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "double precision", "index": 122, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "double precision", "index": 123, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "double precision", "index": 124, "name": "unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 125, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 126, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "boolean", "index": 127, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "numeric", "index": 128, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "numeric", "index": 129, "name": "full_resolution_business_minutes", "comment": null}, "first_reply_time_business_minutes": {"type": "numeric", "index": 130, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "numeric", "index": 131, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "numeric", "index": 132, "name": "requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "numeric", "index": 133, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "numeric", "index": 134, "name": "agent_work_time_in_business_minutes", 
"comment": null}, "on_hold_time_in_business_minutes": {"type": "numeric", "index": 135, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "numeric", "index": 136, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "numeric", "index": 137, "name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_count": {"type": "bigint", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "bigint", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "bigint", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "bigint", "index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "bigint", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "bigint", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "bigint", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "bigint", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "bigint", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "bigint", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "bigint", "index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "bigint", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "bigint", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "bigint", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "bigint", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "bigint", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "bigint", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "bigint", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": {"type": "bigint", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "bigint", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "bigint", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", 
"comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.audit_log"}, "source.zendesk_source.zendesk.brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": 
"daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", 
"comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", 
"comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, "source.zendesk_source.zendesk.ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, 
"subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": 
{"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, 
"name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, "errors": null} \ No newline at end of file diff --git a/docs/manifest.json b/docs/manifest.json index f076a6c7..bdf0d508 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", 
"dbt_version": "1.8.3", "generated_at": "2024-09-03T18:15:30.833916Z", "invocation_id": "ab89e8de-0760-4824-96db-0e8bd67c9f64", "env": {}, "project_name": "zendesk_integration_tests", "project_id": "b8a12ac1bacdf035438fc7646299ce11", "user_id": "8268eefe-e8f7-472e-ab2a-a92f0135d76d", "send_anonymous_usage_stats": true, "adapter_type": "postgres"}, "nodes": {"seed.zendesk_integration_tests.organization_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1725387303.0642428, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "user_id": "bigint", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1725387303.065467, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "schedule_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1725387303.068429, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", "unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": 
false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1725387303.070865, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "47c9244103b9a8dc25c5ce75693b8389df92258dde23dae71a09f021cf1b7ab7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "user_id": "bigint", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1725387303.073257, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "effe2837ec0ff3ec59fddc7fce0a5f4a6ff0a69daef5ae904244dcbf34425dae"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "bigint", "brand_id": "bigint", "external_id": "bigint", 
"forum_topic_id": "bigint", "group_id": "bigint", "organization_id": "bigint", "problem_id": "bigint", "requester_id": "bigint", "submitter_id": "bigint", "ticket_form_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1725387303.074528, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data_postgres": {"database": "postgres", "schema": "zz_zendesk", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", "brand_data_postgres"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": true, "alias": "brand_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1725387303.075753, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1725387303.076877, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": "ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "schedule_id": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1725387303.078084, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1725387303.079233, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "9f600c24b84ed0183e88c5aaa4e7e02bd2228115bebc85217f04c97bd5b6dbc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' 
if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1725387303.080414, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "end_time": "bigint", "start_time": "bigint", "end_time_utc": "bigint", "start_time_utc": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1725387303.081644, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": 
"ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1725387303.082797, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1725387303.083984, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": "a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, 
"on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1725387303.085175, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "postgres", "schema": "zz_zendesk", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1725387303.0863512, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", 
"on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1725387303.087511, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. 
The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requester's organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requester's organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.920539, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n    select *\n    from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n    select *\n    from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n    select *\n    from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n    select *\n    from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n    select *\n    from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n    select *\n    from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n    \n    select *\n    from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n    select *\n    from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n    select \n\n        ticket.*,\n\n        --If you use using_ticket_form_history this will be included, if not it will be ignored.\n        {% if var('using_ticket_form_history', True) %}\n        latest_ticket_form.name as ticket_form_name,\n        {% endif %}\n\n        latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n        latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n        latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n        latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n        latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n        latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n        latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n        --If you use using_domain_names tags this will be included, if not it will be ignored.\n        {% if var('using_domain_names', True) %}\n        organization.domain_names as ticket_organization_domain_names,\n        requester_org.domain_names as requester_organization_domain_names,\n        {% endif %}\n\n        requester.external_id as requester_external_id,\n        requester.created_at as requester_created_at,\n        requester.updated_at as requester_updated_at,\n        requester.role as requester_role,\n        requester.email as requester_email,\n        requester.name as requester_name,\n        requester.is_active as is_requester_active,\n        requester.locale as requester_locale,\n        requester.time_zone as requester_time_zone,\n        coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n        requester_updates.last_updated as requester_ticket_last_update_at,\n        requester.last_login_at as requester_last_login_at,\n        
requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": 
null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n 
requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": "0beb1421df42ed71b84e3cfec7f56029ec6bad71570ab9b4cb4ab712fc753ca6"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": "agent_wait_time_in_business_minutes", "description": "The combined number of business minutes 
the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, 
"assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The id of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The title of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate 
whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.935595, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 
'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n 
ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", 
"model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - 
date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and 
agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as 
total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as 
generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n 
min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select 
\n\n    ticket_full_resolution_time.*,\n    cast(generated_number - 1 as integer) as week_number\n\n    from ticket_full_resolution_time\n    cross join weeks\n    where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n  \n    select \n\n    weeks_cross_ticket_full_resolution_time.*,\n    cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n    cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n    \n    from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n    select \n    ticket_id,\n    week_number,\n    weekly_periods.schedule_id,\n    ticket_week_start_time,\n    ticket_week_end_time,\n    schedule.start_time_utc as schedule_start_time,\n    schedule.end_time_utc as schedule_end_time,\n    least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n    from weekly_periods\n    join schedule on ticket_week_start_time <= schedule.end_time_utc \n    and ticket_week_end_time >= schedule.start_time_utc\n    and weekly_periods.schedule_id = schedule.schedule_id\n    -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n    -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n    \n)\n\n    select \n    ticket_id,\n    sum(scheduled_minutes) as full_resolution_business_minutes\n    from intercepted_periods\n    group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n  \n    select\n    ticket_historical_status.ticket_id,\n    ticket_historical_status.status as ticket_status,\n    ticket_schedules.schedule_id,\n\n    -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n    greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n    least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n    -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n    ticket_historical_status.valid_starting_at as status_valid_starting_at,\n    ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n    from ticket_historical_status\n    left join ticket_schedules\n    on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n    -- making sure there is indeed real overlap\n    where \n    (\n    (\n    (\n    ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n    * 24 + date_part('hour', (least(valid_ending_at, 
schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * 
power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n 
sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n    sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n    sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n    sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n    sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n    sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n    sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n    from business_minutes\n    group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n    select *\n    from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n    select \n      *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n    select\n      ticket_id,\n      end_user_comment_created_at,\n      agent_responded_at\n\n    from ticket_reply_times\n    where is_first_comment\n\n), ticket_first_reply_time as (\n\n    select \n      first_reply_time.ticket_id,\n      ticket_schedules.schedule_created_at,\n      ticket_schedules.schedule_invalidated_at,\n      ticket_schedules.schedule_id,\n\n      -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n      min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n      (\n    (\n        (\n        (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n        * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n        * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n        * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n        /60\n        ) as start_time_in_minutes_from_week,\n      greatest(0,\n  (\n    \n    (\n        (\n        (\n        ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n        * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n        * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n        * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - 
floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n 
ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) 
in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), 
ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n 
select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - 
date_part('minute', (end_user_comment_created_at)::timestamp))\n        * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n        / 60) as reply_time_calendar_minutes\n  from reply_timestamps\n  order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n    select *\n    from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n  ticket.ticket_id,\n  sum(case when is_first_comment then reply_time_calendar_minutes\n    else null end) as first_reply_time_calendar_minutes,\n  sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n  \nfrom ticket\nleft join ticket_reply_times\n  using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n  \n    select \n      ticket_id,\n      status,\n      case when status in ('pending') then status_duration_calendar_minutes\n        else 0 end as agent_wait_time_in_minutes,\n      case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n        else 0 end as requester_wait_time_in_minutes,\n      case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n        else 0 end as solve_time_in_minutes, \n      case when status in ('new', 'open') then status_duration_calendar_minutes\n        else 0 end as agent_work_time_in_minutes,\n      case when status in ('hold') then status_duration_calendar_minutes\n        else 0 end as on_hold_time_in_minutes,\n      case when status = 'new' then status_duration_calendar_minutes\n        else 0 end as new_status_duration_minutes,\n      case when status = 'open' then status_duration_calendar_minutes\n        else 0 end as open_status_duration_minutes,\n      case when status = 'deleted' then 1\n        else 0 end as ticket_deleted,\n      first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n      case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n        then 1\n        else 0\n          end as ticket_recoveries\n\n    from ticket_historical_status\n\n)\n\nselect \n  ticket_id,\n  last_status_assignment_date,\n  sum(ticket_deleted) as ticket_deleted_count,\n  sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n  sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n  sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n  sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n  sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n  sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n  sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n  sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": 
null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, 
"reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.9397209, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_summary\"", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case 
when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as 
suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": 
"model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "2fea56dd7631d630021a96594da99a1b65affd7ec6d7a5a913ef3fc0b7759949"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1725387303.9238021, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last 
incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": 
"target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( 
status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__sla_policies": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "450c1289895dff2dce94dbed7926eeaa895ffa8c6a25524f558d9dcd5e7075fa"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional SLA breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is an SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achievement event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.9226859, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n select *\n from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all \n\n select \n ticket_id,\n
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp_backcompat(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp_backcompat() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from 
reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > \n current_timestamp::timestamp\n)\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": "546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], 
"meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.940202, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_backlog\"", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n 
{% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,ticket_forms.name as ticket_form_name\n\n {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,organizations.name as organization_name\n\n {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,brands.name as brand_name\n\n {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,group_names.name as group_name\n\n {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.locale as local_name\n\n {% else %} --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.{{ col }}\n {% endif %}\n {% endfor %}\n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join ticket_forms\n on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n left join group_names\n on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join users as requester\n on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join brands\n on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join organizations\n on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": 
["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "5879f6ab082c64d3650de0c8a5b3ec5ee85e25eb99646451eab7e9d6499c4d19"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.2869601, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp_backcompat() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp_backcompat"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": 
"-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, \n current_timestamp::timestamp\n) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": "sha256", "checksum": "4c57832cfb5ee57c94f35d90219a0f9d3d73cc8940fc1e70ea2ddb4b4923f98a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, 
"materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.2929, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ 
dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_agent_work_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by 
valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLA minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n 
-- bringing in the following to determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_agent_work_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + 
((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], 
"description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.311729, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n 
from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "fbb6aeccc9d5c6ec4e48160a9f5fdf94c7be4e3639d19a3e55e64ecbedccaa62"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.3156512, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, 
\"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "9ff6bb4774c2854a7d21ac27ac2690db52bc80920ae8d4e88680631557a9b590"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.31987, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 
'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from week_index_calc\n cross join weeks\n 
where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt.date_trunc('week', 'sla_applied_at') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from 
intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select 
\n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 
1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 
\n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n date_trunc('week', sla_applied_at) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", 
"database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.335279, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.338749, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n 
total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and {{ 
dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n 
reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.346454, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n 
select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": 
"model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": "int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "179e24b5711842dc5bd594ae5e927902d89be3db0c9943bf7ac2ee3528433e46"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.350284, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n 
\"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_requester_wait_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), 
intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - 
(ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, 
schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_requester_wait_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": 
"model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "1ddb077adfbf13244c13cb12643a6914f5eac17c714885eac834f9e1eee88475"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.3582091, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
[machine-generated dbt artifact: regenerated target/manifest.json fragment for the zendesk package's reply-time and ticket-history nodes; the raw JSON repeats each model's compiled SQL verbatim inside every downstream node's inlined CTEs. Recoverable node summaries:]

- int_zendesk__requester_wait_time_filtered_statuses (ephemeral, tail of node): joins ticket_historical_status to requester_wait_time_sla and keeps only the statuses ('new', 'open', 'on-hold', 'hold') that count toward requester wait time, including still-open statuses so future SLA breaches can be predicted, not just past ones.
- int_zendesk__comments_enriched (ephemeral): joins int_zendesk__updates comment events to stg_zendesk__user, classifies commenter_role (external_comment for end-users, internal_comment for agents/admins), lags it into previous_commenter_role (defaulting to 'first_comment' for public comments, 'non_public_comment' otherwise), and window-sums previous_internal_comment_count per ticket.
- int_zendesk__ticket_reply_times (ephemeral): from public comments, keeps net-new end-user comments and internal first comments, pairs each with the earliest subsequent agent reply, and computes reply_time_calendar_minutes as dbt.datediff(end_user_comment_created_at, agent_responded_at, 'second') / 60.
- int_zendesk__ticket_reply_times_calendar (ephemeral): joins stg_zendesk__ticket to the reply times and aggregates first_reply_time_calendar_minutes and total_reply_time_calendar_minutes per ticket.
- int_zendesk__ticket_first_reply_time_business (ephemeral; enabled by var('using_schedules')): splits each first-reply window into calendar weeks via dbt_utils.generate_series(52), intersects the weekly periods with int_zendesk__schedule_spine (choosing the Daylight Savings vs. Standard Time version of each schedule through its valid_from/valid_until dates), and sums scheduled_minutes into first_reply_time_business_minutes.
- int_zendesk__field_history_enriched (ephemeral): joins stg_zendesk__ticket_field_history to int_zendesk__updater_information, coalescing any configured ticket_field_history_updater_columns so trigger-driven changes still populate updater fields.
- int_zendesk__field_history_pivot (incremental, delete+insert on unique_key ticket_day_id, partitioned by date_day): keeps the last event per ticket, field, and day, pivots each field in ticket_field_history_columns into its own column, and keys rows with dbt_utils.generate_surrogate_key(['ticket_id', 'date_day']).
- int_zendesk__updater_information (ephemeral): joins int_zendesk__user_aggregates to int_zendesk__organization_aggregates to expose updater_* attributes, conditionally including user tags, domain names, and organization tags.
updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.394377, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from 
set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "79bd1e8de549cfc22088000a4171419b554d6b0fa931a1f2deaabaed7e01e72b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1725387303.399713, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp_backcompat() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as 
(\n\n select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp_backcompat", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then \n current_timestamp::timestamp\n else updated_at end) as date) as open_until\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' 
|| coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ 
var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1725387303.40665, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then 
status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "9ea4023c98c8bdebaf01445490e058d4766cb32a45db569e01e91fa8eac2e689"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1725387303.409292, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_status_crossed_with_schedule as (\n \n select\n 
ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= 
schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as 
ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', 
(ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + 
ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "2131dbec96be6f5fee780a243b7f48940504a36a33c6fe1b66b24be1a8396928"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1725387303.41728, "relation_name": null, "raw_code": "-- depends_on: {{ source('zendesk', 'ticket') }}\n\nwith spine as (\n\n {% if execute %}\n {% set current_ts = dbt.current_timestamp_backcompat() %}\n {% set first_date_query %}\n select min( created_at ) as min_date from {{ source('zendesk', 'ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ 
dbt.dateadd('year', - var('ticket_field_history_timeframe_years', 50), current_ts ) }}\n {% endset %}\n\n {% set first_date = run_query(first_date_query).columns[0][0]|string %}\n \n {% if target.type == 'postgres' %}\n {% set first_date_adjust = \"cast('\" ~ first_date[0:10] ~ \"' as date)\" %}\n\n {% else %}\n {% set first_date_adjust = \"'\" ~ first_date[0:10] ~ \"'\" %}\n\n {% endif %}\n\n {% else %} {% set first_date_adjust = \"2016-01-01\" %}\n {% endif %}\n\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = first_date_adjust,\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], 
"alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1725387303.430285, "relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": 
"int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', 
(solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "92b30d97de3fa5a059b70ef930d731bc7cfeb93a39206970f37ed605264c01af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1725387303.434887, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') 
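The calendar resolution model above leans on two small pieces of arithmetic: `count_reopens` is `solved_count - 1` floored at zero (the first solve is not a reopen), and `dbt.datediff(..., 'minute')` compiles on Postgres to the day/hour/minute expansion visible in the compiled code. A minimal sketch with one hypothetical sample row, assuming Postgres:

```sql
-- Hypothetical sample row; demonstrates count_reopens and the compiled
-- dbt.datediff('minute') arithmetic from the model above.
with solved_times as (
    select 1 as ticket_id,
           3 as solved_count,
           timestamp '2024-05-01 09:00:00' as created_at,
           timestamp '2024-05-02 10:30:00' as last_solved_at
)
select
    ticket_id,
    case when solved_count <= 1 then 0
         else solved_count - 1                -- 3 solves -> 2 reopens
    end as count_reopens,
    ((last_solved_at::date - created_at::date) * 24
        + date_part('hour', last_solved_at) - date_part('hour', created_at)) * 60
        + date_part('minute', last_solved_at) - date_part('minute', created_at)
        as final_resolution_calendar_minutes  -- (1 day * 24 + 1 hr) * 60 + 30 = 1530
from solved_times;
```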
}}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith 
historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n 
p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 
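The `weeks` CTE above is what `dbt_utils.generate_series(52)` compiles to: six cross-joined copies of a `{0, 1}` relation act as a 6-bit binary counter, producing 64 candidates that are then filtered down to 1..52. A standalone restatement, assuming Postgres:

```sql
-- Each pN column contributes one binary digit; the sum plus 1 enumerates 1..64.
with p as (select 0 as generated_number union all select 1),
unioned as (
    select
        p0.generated_number           -- 2^0
      + p1.generated_number * 2       -- 2^1
      + p2.generated_number * 4       -- 2^2
      + p3.generated_number * 8       -- 2^3
      + p4.generated_number * 16      -- 2^4
      + p5.generated_number * 32      -- 2^5
      + 1 as generated_number
    from p as p0
    cross join p as p1
    cross join p as p2
    cross join p as p3
    cross join p as p4
    cross join p as p5
)
select generated_number
from unioned
where generated_number <= 52   -- 64 candidates, keep the first 52
order by generated_number;
```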
1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": 
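The heart of the business-minutes models is the `intercepted_periods` join: ticket activity and schedule blocks are both expressed as minutes from the start of the week, so their overlap is `least(ends) - greatest(starts)`. A minimal sketch with hypothetical minute values, assuming Postgres:

```sql
-- Ticket active for minutes 1000..3000 of the week; schedule covers 1500..2500.
with weekly_periods as (
    select 1 as ticket_id,
           1000 as ticket_week_start_time,
           3000 as ticket_week_end_time
), schedule as (
    select 1 as schedule_id,
           1500 as start_time_utc,
           2500 as end_time_utc
)
select
    ticket_id,
    least(ticket_week_end_time, end_time_utc)
      - greatest(ticket_week_start_time, start_time_utc) as scheduled_minutes  -- = 1000
from weekly_periods
join schedule
  on ticket_week_start_time <= schedule.end_time_utc   -- intervals intersect
 and ticket_week_end_time >= schedule.start_time_utc;
```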
"sha256", "checksum": "c14c73bcfcc33dc8bc6a94827770c47f4e70f4608f3227bbbc1f10cbcad4c572"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1725387303.441992, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n 
ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n 
case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday 
as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as 
schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - 
date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/zendesk__document.sql", "original_file_path": "models/unstructured/zendesk__document.sql", "unique_id": "model.zendesk.zendesk__document", "fqn": ["zendesk", "unstructured", "zendesk__document"], "alias": "zendesk__document", "checksum": {"name": "sha256", "checksum": "0d3d8f2e10bcc679a958386cd5b13f616e17139821263f12c8dddef34c93b21b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about its tags, assignees, requester, submitter, organization and group.", "columns": {"document_id": {"name": "document_id", "description": "Equivalent to `ticket_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_index": {"name": "chunk_index", "description": "The index of the chunk associated with the `document_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_tokens_approximate": {"name": "chunk_tokens_approximate", "description": "Approximate number of tokens for the chunk, assuming 4 characters per token.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk": {"name": "chunk", "description": "The text of the chunk.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/unstructured/zendesk_unstructured.yml", "build_path": null, 
"unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.983475, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_document as (\n select *\n from {{ ref('int_zendesk__ticket_document') }}\n\n), grouped as (\n select *\n from {{ ref('int_zendesk__ticket_comment_documents_grouped') }}\n\n), final as (\n select\n cast(ticket_document.ticket_id as {{ dbt.type_string() }}) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n {{ dbt.concat([\n \"ticket_document.ticket_markdown\",\n \"'\\\\n\\\\n## COMMENTS\\\\n\\\\n'\",\n \"grouped.comments_group_markdown\"]) }}\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__ticket_document", "package": null, "version": null}, {"name": "int_zendesk__ticket_comment_documents_grouped", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.concat"], "nodes": ["model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_comment_documents_grouped"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/zendesk__document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_document as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"\n\n), grouped as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"\n\n), final as (\n select\n cast(ticket_document.ticket_id as TEXT) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n ticket_document.ticket_markdown || '\\n\\n## COMMENTS\\n\\n' || grouped.comments_group_markdown\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_documents_grouped"], "alias": "int_zendesk__ticket_comment_documents_grouped", "checksum": {"name": "sha256", "checksum": "ad03266e19d20396ca75812cb98816f3e11e078c63c30807790903674f4db42b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": 
true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.452148, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith filtered_comment_documents as (\n select *\n from {{ ref('int_zendesk__ticket_comment_document') }}\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast({{ dbt_utils.safe_divide('floor(cumulative_length - 1)', var('zendesk_max_tokens', 5000)) }} as {{ dbt.type_int() }}) as chunk_index,\n {{ dbt.listagg(\n measure=\"comment_markdown\",\n delimiter_text=\"'\\\\n\\\\n---\\\\n\\\\n'\",\n order_by_clause=\"order by comment_time\"\n ) }} as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_comment_document", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.safe_divide", "macro.dbt.type_int", "macro.dbt.listagg"], "nodes": ["model.zendesk.int_zendesk__ticket_comment_document"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "compiled": true, "compiled_code": "\n\nwith filtered_comment_documents as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast(\n ( floor(cumulative_length - 1) ) / nullif( ( 5000 ), 0)\n as integer) as chunk_index,\n \n string_agg(\n comment_markdown,\n '\\n\\n---\\n\\n'\n order by comment_time\n ) as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_document"], "alias": "int_zendesk__ticket_comment_document", "checksum": {"name": "sha256", 
"checksum": "e75f893dec0ca7599db16793ad9b39bf5d33f463abe6fa4d7be8019e095f45d8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.4594922, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_comments as (\n select *\n from {{ var('ticket_comment') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_email,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n {{ dbt.concat([\n \"'### message from '\", \"commenter_name\", \"' ('\", \"commenter_email\", \"')\\\\n'\",\n \"'##### sent @ '\", \"comment_time\", \"'\\\\n'\",\n \"comment_body\"\n ]) }} as {{ dbt.type_string() }})\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n {{ zendesk.count_tokens(\"comment_markdown\") }} as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then left(comment_markdown, {{ var('zendesk_max_tokens', 5000) }} * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then {{ var('zendesk_max_tokens', 5000) }}\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_comments as (\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_email,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n '### message from ' || commenter_name || ' (' || commenter_email || ')\\n' || '##### sent @ ' || comment_time || '\\n' || comment_body as TEXT)\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n \n \n\n length(\n comment_markdown\n ) / 4\n as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > 5000 then left(comment_markdown, 5000 * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > 5000 then 5000\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_document"], "alias": "int_zendesk__ticket_document", "checksum": {"name": "sha256", "checksum": "1fd6807d45c4904ff1ecbc4b929c675ae0b766b40a711641af85cfe4c6cae4ec"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.4689212, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith tickets as (\n select *\n from {{ var('ticket') }}\n\n), users as (\n select 
*\n from {{ var('user') }}\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as user_name,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as created_by,\n tickets.created_at AS created_on,\n {{ zendesk.coalesce_cast([\"tickets.status\", \"'UNKNOWN'\"], dbt.type_string()) }} as status,\n {{ zendesk.coalesce_cast([\"tickets.priority\", \"'UNKNOWN'\"], dbt.type_string()) }} as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n {{ dbt.concat([\n \"'# Ticket : '\", \"ticket_name\", \"'\\\\n\\\\n'\",\n \"'Created By : '\", \"user_name\", \"' ('\", \"created_by\", \"')\\\\n'\",\n \"'Created On : '\", \"created_on\", \"'\\\\n'\",\n \"'Status : '\", \"status\", \"'\\\\n'\",\n \"'Priority : '\", \"priority\"\n ]) }} as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n {{ zendesk.count_tokens(\"ticket_markdown\") }} as ticket_tokens\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_document.sql", "compiled": true, "compiled_code": "\n\nwith tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as user_name,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as created_by,\n tickets.created_at AS created_on,\n \n coalesce(\n cast(tickets.status as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as status,\n \n coalesce(\n cast(tickets.priority as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n '# Ticket : ' || ticket_name || '\\n\\n' || 'Created By : ' || user_name || ' (' || created_by || ')\\n' || 'Created On : ' || created_on || '\\n' || 'Status : ' || status || '\\n' || 'Priority : ' || priority as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n \n \n\n length(\n ticket_markdown\n ) / 4\n as ticket_tokens\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", 
"unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.474884, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as 
ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.478792, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n 
select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - (coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", 
"extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", "checksum": {"name": "sha256", "checksum": "1c8a86acea05e857271d7f0a2b8f3ec6c09481ecdb9278f922b2df35d5410a48"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.5104918, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp_backcompat() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n 
* 60 + date_part('minute', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__user_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.514125, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "7f1a9c1fa0958132d288a2a8f980d661f6f04662b0573382c9a434574a8daf6b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.519047, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), schedule as (\n\n select *\n from {{ var('schedule') }} \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each 
day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from {{ var('schedule_holiday') }} \n inner join {{ ref('int_zendesk__calendar_spine') }} \n on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} )\n and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_end_date_at, -- add 24*60*60 seconds\n cast({{ dbt_date.week_start(\"schedule_holiday.holiday_start_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_start,\n cast({{ dbt_date.week_end(\"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_start_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_end_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"holiday_week_end\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n {{ dbt_utils.group_by(n=9) }}\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n 
holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. 
If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast({{ dbt.dateadd(\"hour\", \"2\", \"valid_until\") }} as {{ dbt.type_timestamp() }}) < cast(lead_next_start as {{ dbt.type_timestamp() }})\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now lets prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}, {"name": "int_zendesk__calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.current_timestamp_backcompat", "macro.dbt.dateadd", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt.datediff", "macro.dbt_utils.group_by", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk.int_zendesk__calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to 
determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. 
In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(\n current_timestamp::timestamp\n as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n schedule_holiday.holiday_end_date_at + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_week_start,\n cast(cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_end_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n (\n (\n ((holiday_start_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_start_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_start_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n (\n (\n ((holiday_end_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_end_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_end_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, 
\n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n holiday_week_end + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n valid_until + ((interval '1 hour') * (2))\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now lets prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n 
p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "cb5b35bbbe0e39c8ef56fcffc75db481246fe4863cd5c80d4a6dd43d956f93af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.5302908, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n \n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n \n select *\n from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n \n select *\n from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query 
creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n 
from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n \n current_timestamp::timestamp\n + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.5353842, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), 
ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", "original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.536685, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": 
null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role 
= 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.537942, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], 
"depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.5391371, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.540482, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded 
preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by 
valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", 
"incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1725387303.541736, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": 
{}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.545011, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": 
["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.5489228, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", 
"model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": "hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "2ddbab03f22ae37fb097b251c5b2d86ecdb8c41e67db2e8885707fa492b1cb51"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1725387303.614481, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", 
\"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.9975412, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add 
more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387304.001636, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387304.002548, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"", "raw_code": "with base as (\n\n 
select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "154109fa9fd9dc5e3b0b034929ac3e3ddb591755d52a78f64ab2bb7d6cfe2476"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 
1725387304.005244, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": "8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, 
"tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387304.003631, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], 
"quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.995868, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387304.0043159, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group": {"database": 
"postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.994965, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "ffc2c4310aafe6b90a26e22cdab400e6d4c750faab7ea4d7519b2cf9105d3f16"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, 
"access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.997264, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, 
"access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387304.000641, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387304.0003479, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", 
"checksum": {"name": "sha256", "checksum": "7227f84c3600cc310217efae6695bc0f6aea11b2392f5709a54d444a772a9d2c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. 
The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.999443, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n 
[The remainder of this hunk is the generated dbt docs artifact (`manifest.json`), which serializes every model node twice (once as Jinja `raw_code`, once as compiled SQL) alongside checksums, configs, and column documentation. The node definitions in this stretch are summarized below rather than reproduced as raw wrapped JSON.]

Final staging models (each materialized as a table in `zz_zendesk_zendesk_dev`):

- `stg_zendesk__user` (tail of the node cut off above): the `final` CTE renames `id` to `user_id`, casts `last_login_at`, `created_at`, and `updated_at` to timestamps, and exposes `active` and `suspended` as `is_active` and `is_suspended`.
- `stg_zendesk__brand` ("Brands are your customer-facing identities"): renames `id` to `brand_id` and `active` to `is_active`, and drops soft-deleted rows with `where not coalesce(_fivetran_deleted, false)`.
- `stg_zendesk__ticket_form_history`: disabled when the `using_ticket_form_history` var is false; casts `created_at`/`updated_at` via `dbt.type_timestamp()`, renames `id` to `ticket_form_id` and `active` to `is_active`, and applies the same `_fivetran_deleted` filter.
- `stg_zendesk__domain_name`: disabled when `using_domain_names` is false; selects `organization_id`, `domain_name`, and `index`.
- `stg_zendesk__organization_tag`: disabled when `using_organization_tags` is false; selects `organization_id` and `tag` aliased to `tags`, with a quoted-literal branch when `target.type == 'redshift'`.
- `stg_zendesk__ticket`: renames `id` to `ticket_id`, casts the timestamps, maps `via_channel` to `created_channel` and the other `via_*` fields to `source_*` columns, and appends user-configured columns via `fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns')`.

All of these follow the same three-CTE pattern (`base` -> `fields` -> `final`). The `fields` CTE is generated by `fivetran_utils.fill_staging_columns`, which compares the columns the package expects (`staging_columns`, supplied by the `get_*_columns()` macros) against the columns actually present in the source (`source_columns`, read from the `_tmp` relation at compile time via `adapter.get_columns_in_relation`), aliasing what exists and null-casting what does not; hence compiled output such as `cast(null as boolean) as _fivetran_deleted` for sources that lack the column.
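For reference, this is the shared pattern exactly as it appears in the `stg_zendesk__brand` node's `raw_code` (the long macro comment is condensed into inline notes; the other staging models differ only in the `_tmp` model referenced, the `get_*_columns()` macro, and the final column list):

```sql
with base as (

    -- the tmp model is a straight star-select of the source table
    select *
    from {{ ref('stg_zendesk__brand_tmp') }}
),

fields as (

    -- fill_staging_columns aliases expected columns that exist in the source
    -- and null-casts the ones that do not
    select
        {{
            fivetran_utils.fill_staging_columns(
                source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),
                staging_columns=get_brand_columns()
            )
        }}
    from base
),

final as (

    select
        id as brand_id,
        brand_url,
        name,
        subdomain,
        active as is_active
    from fields
    where not coalesce(_fivetran_deleted, false)
)

select *
from final
```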
"extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.765873, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zz_zendesk\".\"daylight_time_data\" as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": "606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.777208, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zz_zendesk\".\"user_data\" as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ 
var('directed_schema','dev') }}"}, "created_at": 1725387303.780477, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"group_data\" as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.783724, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n 
\"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_data\" as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.817656, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"brand_data\" as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", 
"stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.821321, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_tag_data\" as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "9cd5e53ebcb7f11e55f772a7826b78a7f5f6b27ba975834c28c504181a548a3b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.824669, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"", "raw_code": "--To 
disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_holiday_data\" as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.828193, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom 
\"postgres\".\"zz_zendesk\".\"user_tag_data\" as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.8324108, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\" as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", "checksum": {"name": "sha256", "checksum": 
"0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.83569, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_form_history_data\" as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": "stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": "756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, 
"meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.8391738, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_comment_data\" as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.8423538, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, "compiled_path": 
"target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"organization_tag_data\" as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.845818, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_data\" as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", 
"package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1725387303.8500109, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zz_zendesk\".\"organization_data\" as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": 
[], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.853249, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__domain_name_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, 
"unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.858218, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zz_zendesk\".\"domain_name_data\" as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1725387303.861713, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": 
true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zz_zendesk\".\"time_zone_data\" as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", "unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387303.978186, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", 
"store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387303.9793558, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": "unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387303.980214, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies", "test_metadata": 
{"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387303.9810271, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387303.981824, "relation_name": 
null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": "unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.005754, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", 
"path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.006597, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.007485, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, 
"compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": "not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.0082932, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": 
"dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.009095, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.009891, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, 
"column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.010689, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.011528, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by organization_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.012316, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "not_null", "kwargs": 
{"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.013124, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": 
"count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.0139148, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.014712, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", 
"model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.015556, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.0163488, "relation_name": null, "raw_code": "{{ 
test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": "models.stg_zendesk__ticket_form_history", "attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1725387304.017161, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom 
validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time", "test_metadata": {"name": "unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": "unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.02914, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": 
null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.029954, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.030755, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, 
"checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", "not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1725387304.03155, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}}, "sources": {"source.zendesk_source.zendesk.ticket": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. 
Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\", it has a due date. The due date is in ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "created_at": 1725387304.09152}, "source.zendesk_source.zendesk.brand": {"database": "postgres", "schema": "zz_zendesk", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "created_at": 1725387304.092302}, "source.zendesk_source.zendesk.domain_name": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "created_at": 1725387304.092416}, "source.zendesk_source.zendesk.group": {"database": "postgres", "schema": "zz_zendesk", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. 
Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "created_at": 1725387304.09251}, "source.zendesk_source.zendesk.organization_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "created_at": 1725387304.092611}, "source.zendesk_source.zendesk.organization": {"database": "postgres", "schema": "zz_zendesk", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. 
Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name": "shared_tickets", "description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "created_at": 1725387304.0927172}, "source.zendesk_source.zendesk.ticket_comment": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": 
{"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "created_at": 1725387304.0928159}, "source.zendesk_source.zendesk.user_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "created_at": 1725387304.0929122}, "source.zendesk_source.zendesk.user": {"database": "postgres", "schema": "zz_zendesk", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users: end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization membership, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. 
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "created_at": 1725387304.093015}, "source.zendesk_source.zendesk.schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "created_at": 1725387304.09311}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "created_at": 1725387304.093195}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": 
[], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "created_at": 1725387304.0933518}, "source.zendesk_source.zendesk.ticket_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "created_at": 1725387304.0934389}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "created_at": 1725387304.093525}, "source.zendesk_source.zendesk.daylight_time": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "created_at": 1725387304.0936162}, "source.zendesk_source.zendesk.time_zone": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "created_at": 1725387304.093698}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "created_at": 1725387304.093796}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.0921988, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.092505, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": 
"postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.092706, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.09282, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.092931, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.0930471, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as 
table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.09474, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.095142, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n 
pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.095863, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.096, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ 
get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.105292, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.105856, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.106154, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.106465, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.106905, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.107313, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.107478, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, 
auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1078, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1081731, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.109094, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.10935, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1096902, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.109974, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ 
magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.110406, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.110636, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.111221, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.111428, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1725387302.111555, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.111745, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.111885, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.112295, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and 
DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.113131, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.113298, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.113612, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.113757, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.114039, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.114951, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }}\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\t{{ ';' if not loop.last else \"\" }}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1154718, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.11576, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}{{ ';' if not loop.last else \"\" }}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.116197, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.116332, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.117007, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1725387302.117184, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.117318, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.117862, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.118037, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.118253, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ 
limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1190112, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.122335, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.122505, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.122989, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1233761, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1244109, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.124659, "supported_languages": null}, 
"macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.124852, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.125029, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1251898, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1255848, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.125877, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set 
[machine-generated dbt docs artifact: a minified `manifest.json` fragment enumerating built-in dbt-core macros, including the snapshot merge, strategy, and helper macros (`snapshot_merge_sql`, `strategy_dispatch`, `snapshot_timestamp_strategy`, `snapshot_check_strategy`, `snapshot_staging_table`, `build_snapshot_table`), the default snapshot, test, unit, materialized view, view, and table materializations, and the incremental helpers (`get_quoted_csv`, `diff_columns`, `diff_column_data_types`, `get_merge_update_columns`, `get_merge_sql`, `get_delete_insert_merge_sql`, `get_insert_overwrite_merge_sql`, `is_incremental`, and the incremental strategy dispatchers); this file is regenerated by `dbt docs generate` rather than hand-edited]
"macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.186683, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.186893, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.187155, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1873379, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.187485, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.18776, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.192704, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.198669, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.1999402, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.201132, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.201951, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.202198, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.202312, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2025962, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.202731, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.206591, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.209697, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.214772, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2156742, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.215967, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.216439, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.216631, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- 
dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.216764, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.216908, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.217022, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.217175, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.21729, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1725387302.217746, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.21792, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.219116, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.219533, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ 
\"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.219899, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2204218, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.220673, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.220939, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.221317, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2216, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.222362, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.222712, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.222889, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2230852, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2232661, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2240288, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and 
create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2252438, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.225612, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.22585, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2261078, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.226315, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2266188, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2268171, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.227514, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.227988, "supported_languages": null}, "macro.dbt.rename_relation": {"name": 
"rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.228207, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.228483, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.228829, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.229095, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": 
["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.229553, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.230005, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.230326, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2305229, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.230761, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.230866, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.231128, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.231343, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.231637, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ 
exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.231765, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2320268, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2321699, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.232773, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been 
implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.232948, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.233217, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.233355, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.23363, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": 
"macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.23378, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.234802, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.23492, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.235423, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) 
-%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.235581, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.235719, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.236991, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.237354, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.237678, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1725387302.237924, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.238025, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2382889, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.23843, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.238694, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.238842, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.239717, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.239939, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2403998, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.241058, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2415, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.24168, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.241853, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2420921, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.242196, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": 
"macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.243032, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2431731, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.244307, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.244499, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.244705, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.244967, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2451048, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2454982, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.245682, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.245881, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.246314, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.246677, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.246964, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2472022, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2477472, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} 
for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.249133, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.249673, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.249943, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2517672, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.253001, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.253707, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.253931, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.254153, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.254229, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2549639, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.255519, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.255741, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2560909, "supported_languages": null}, "macro.dbt.date": {"name": "date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(adapter.dispatch('date', 'dbt') (year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.256408, "supported_languages": null}, "macro.dbt.default__date": {"name": "default__date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.default__date", "macro_sql": "{% macro default__date(year, month, day) -%}\n {%- set dt = modules.datetime.date(year, month, day) -%}\n {%- set iso_8601_formatted_date = dt.strftime('%Y-%m-%d') -%}\n to_date('{{ iso_8601_formatted_date }}', 'YYYY-MM-DD')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2566772, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", 
"macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.25699, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.257144, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.257376, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2575, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.258414, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.258815, "supported_languages": null}, 
"macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.258993, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2594821, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.259729, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.259834, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.260155, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": 
"default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2603872, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2606, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.260674, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2609222, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.26106, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1725387302.261339, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.261469, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2620878, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.262471, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2628, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.262957, "supported_languages": null}, "macro.dbt.safe_cast": 
{"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.263229, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.263361, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.263604, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2637608, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.264029, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.26421, 
"supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.264508, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2646358, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.26487, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2650392, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.265313, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.265447, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.265677, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2657762, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.266689, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.266838, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2669911, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.267137, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.267296, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.26744, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.267595, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.267765, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.267921, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.268071, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.268227, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.268366, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.268519, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.268665, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.26893, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2691221, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2693539, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.269452, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.269771, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2700741, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2702348, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1725387302.270766, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.270924, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.27114, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.271401, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2715242, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.271879, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": 
"macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.272108, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.272376, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.272501, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.272855, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2730298, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1725387302.273182, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.273354, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.273823, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.274029, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2741702, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.27427, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", 
"unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.274427, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.274503, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2746592, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.274822, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.275637, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": 
"macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.275768, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.275938, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.276408, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.276599, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2767308, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) 
-%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.276881, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.277005, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.27898, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.279139, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.279347, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = 
base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.279619, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2799208, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.280223, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.280402, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.28056, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": 
"macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.28079, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.281315, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.281535, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.281673, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.282105, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2825541, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.282849, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.283062, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.284729, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.284845, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.285007, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2851148, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.285433, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.28561, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.28571, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.285922, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.286344, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.286557, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2867358, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.286952, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.287582, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2877629, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.287991, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.288243, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2893481, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2898638, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.290047, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2901819, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1725387302.290835, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.290994, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.291187, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2913432, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.291598, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2921238, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.29503, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.295277, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2954738, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.295716, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.295894, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2960472, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2962172, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.296448, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.29664, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) 
-%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2969239, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.297097, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.297246, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.2974029, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.297546, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.29774, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.297904, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.300117, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.300308, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3007512, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.300967, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3011699, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.301345, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.302458, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.302788, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.302974, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.303306, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3035269, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.304085, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.304331, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3050601, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = 
adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this ~ \" because the relation doesn't exist\") }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.308269, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.308862, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n 
\"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for \" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.30991, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.311556, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3117068, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.312522, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.312962, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.313546, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND 
----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3139992, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.314071, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.31458, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.314816, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.315109, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1725387302.315388, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.315728, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.316183, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.316649, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.31726, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.317557, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3178592, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3190012, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.32006, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3209221, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. 
full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.321939, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.322588, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.322912, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", 
"unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.32361, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.324407, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3249102, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as 
filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.32538, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.325978, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.326426, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.326957, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.327321, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.327981, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' 
,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n {% if group_by_columns|length() == 0 %}\n where {{ column_name }} is not null\n limit 1\n {% endif %}\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.328851, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.329468, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.330069, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": 
"macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.330653, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.331033, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.331445, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1725387302.3317912, "supported_languages": null}, "macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.332454, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as {{ dbt.type_numeric() }}) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.333269, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3341608, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro 
default__test_sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n{% set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.335024, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns, exclude_columns, precision)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.336994, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n\n{%- if compare_columns and exclude_columns -%}\n {{ exceptions.raise_compiler_error(\"Both a compare and an ignore list were provided to the `equality` macro. Only one is allowed\") }}\n{%- endif -%}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{# Ensure there are no extra columns in the compare_model vs model #}\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- do dbt_utils._is_ephemeral(compare_model, 'test_equality') -%}\n\n {%- set model_columns = adapter.get_columns_in_relation(model) -%}\n {%- set compare_model_columns = adapter.get_columns_in_relation(compare_model) -%}\n\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- set include_model_columns = [] %}\n {%- for column in model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n {%- for column in compare_model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_model_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns_set = set(include_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(include_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- else -%}\n {%- set compare_columns_set = set(model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(compare_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- endif -%}\n\n {% if compare_columns_set != compare_model_columns_set %}\n {{ exceptions.raise_compiler_error(compare_model ~\" has less columns than \" ~ model ~ \", please ensure they have the same columns or use the `compare_columns` or `exclude_columns` arguments to subset them.\") }}\n {% endif %}\n\n\n{% endif %}\n\n{%- if not precision -%}\n {%- if not compare_columns -%}\n {# \n You cannot get the columns in an ephemeral model (due to not existing in the information schema),\n so if the user does not provide an explicit list of columns we must error in the case it is ephemeral\n #}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model)-%}\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- for column in compare_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns = include_columns | map(attribute='quoted') %}\n {%- else -%} {# Compare columns provided #}\n {%- set compare_columns = compare_columns | map(attribute='quoted') %}\n {%- endif -%}\n {%- endif -%}\n\n {% set compare_cols_csv = compare_columns | join(', ') %}\n\n{% else %} {# Precision required #}\n {#-\n If rounding is required, we need to get the types, so it cannot be ephemeral even if they provide column names\n -#}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set columns = adapter.get_columns_in_relation(model) -%}\n\n {% set columns_list = [] %}\n {%- for col in columns -%}\n {%- if (\n (col.name|lower in compare_columns|map('lower') or not compare_columns) and\n (col.name|lower not in exclude_columns|map('lower') or not exclude_columns)\n ) -%}\n {# 
Databricks double type is not picked up by any number type checks in dbt #}\n {%- if col.is_float() or col.is_numeric() or col.data_type == 'double' -%}\n {# Cast is required due to postgres not having round for a double precision number #}\n {%- do columns_list.append('round(cast(' ~ col.quoted ~ ' as ' ~ dbt.type_numeric() ~ '),' ~ precision ~ ') as ' ~ col.quoted) -%}\n {%- else -%} {# Non-numeric type #}\n {%- do columns_list.append(col.quoted) -%}\n {%- endif -%}\n {% endif %}\n {%- endfor -%}\n\n {% set compare_cols_csv = columns_list | join(', ') %}\n\n{% endif %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_numeric", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.340779, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.341303, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.341604, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", "macro_sql": 
"{% test mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3453262, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last record.\n 
coalesce(\n upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3467488, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3470101, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.347168, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.34759, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.347851, "supported_languages": null}, 
"macro.dbt_utils.default__pretty_time": {"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.348032, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.348274, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.348435, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{% if not string %}\n{{ return('') }}\n{% endif %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.349217, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro }}` 
macro cannot be used with ephemeral models, as it relies on the information schema.\n\n`{{ model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3500412, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.350736, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.351296, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.351521, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n 
)}}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.351862, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.352229, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.352813, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.353112, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.353436, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3540952, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.354916, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.355846, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.356249, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.356436, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not 
loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.35693, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.357592, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3583949, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3587892, "supported_languages": null}, 
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.359057, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.360283, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3618062, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name, quote_identifiers)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.363343, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n {%- set current_col_name = adapter.quote(col.column) if quote_identifiers else col.column -%}\n select\n {%- for exclude_col in exclude %}\n {{ adapter.quote(exclude_col) if quote_identifiers else exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ adapter.quote(field_name) if quote_identifiers else field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(current_col_name) }}\n {% else %}\n {{ current_col_name }}\n {% endif %}\n as {{ cast_to }}) as {{ 
adapter.quote(value_name) if quote_identifiers else value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3650858, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.365372, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3655019, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.368782, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.372125, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3724558, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3727, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.postgres__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.373635, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.373846, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }} as tt\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3740091, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.37419, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3743548, "supported_languages": null}, "macro.dbt_utils.databricks__deduplicate": {"name": "databricks__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.databricks__deduplicate", "macro_sql": "\n{%- macro databricks__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.374513, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3746781, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, 
*varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.375047, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by \\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.375274, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.375634, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3761432, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3764648, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.376776, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.380082, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.380426, "supported_languages": null}, "macro.dbt_utils.redshift__get_tables_by_pattern_sql": {"name": "redshift__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.redshift__get_tables_by_pattern_sql", "macro_sql": "{% macro redshift__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% set sql %}\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from \"{{ database }}\".\"information_schema\".\"tables\"\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n union all\n select distinct\n schemaname as {{ adapter.quote('table_schema') }},\n tablename as {{ adapter.quote('table_name') }},\n 'external' as {{ adapter.quote('table_type') }}\n from svv_external_tables\n where redshift_database_name = '{{ database }}'\n and schemaname ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n {% endset %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.381126, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.381802, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro 
_bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.382273, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.38338, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": "default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. 
Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.384792, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.38588, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.386676, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.387125, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {{ return('') }}\n {% endif %}\n\n {%- set include_cols = [] %}\n {%- set cols = adapter.get_columns_in_relation(from) -%}\n {%- set except = except | map(\"lower\") | list %}\n {%- for col in cols -%}\n {%- if col.column|lower not in except -%}\n {% do include_cols.append(col.column) %}\n {%- endif %}\n {%- endfor %}\n\n {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.387816, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3885891, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.389027, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": 
"macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3892071, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n {{ return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.389579, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3901381, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.390584, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": 
"\n\n{%- macro default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.39118, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.postgres__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.39171, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3918562, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.39199, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n 
case table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.392123, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.392621, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. 
Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.393381, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.3944619, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.394728, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.395282, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.396033, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.401241, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in get_tables(table_regex_pattern) 
%}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.401896, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.402724, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.403413, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in 
get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.404089, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.405142, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.405309, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4054132, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.408185, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}})))\n else ceil(months_between(date({{second_date}}), date({{first_date}})))\n end\n \n -- did we cross a month boundary?\n + case\n when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n else 0 end\n \n {%- elif datepart == 'quarter' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n end\n \n -- did we cross a quarter boundary?\n + case\n when {{first_date}} < {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then 1\n when {{first_date}} > {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then -1\n else 0 end\n\n {%- elif datepart == 'year' -%}\n \n year({{second_date}}) - year({{first_date}})\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set divisor -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n case when {{first_date}} < {{second_date}}\n then ceil((\n {# make sure the timestamps are real, otherwise raise an error asap #}\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n else floor((\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n end\n \n {% if datepart == 'millisecond' %}\n + cast(date_format({{second_date}}, 'SSS') as int)\n - cast(date_format({{first_date}}, 'SSS') as int)\n {% endif %}\n \n {% if datepart == 'microsecond' %} \n {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n -- Spark doesn't really support microseconds, so this is a massive hack!\n -- It will only work if the timestamp-string is of the format\n -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int) \n {% endif %}\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.415711, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.415851, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.415925, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.416466, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.417989, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4183092, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.418575, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4188359, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4191449, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.419338, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.419531, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.428715, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4308748, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n 
d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\n\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.433011, "supported_languages": null}, "macro.dbt_date.get_base_dates": {"name": "get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.get_base_dates", "macro_sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_base_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.434651, "supported_languages": null}, "macro.dbt_date.default__get_base_dates": {"name": "default__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.default__get_base_dates", "macro_sql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.435349, "supported_languages": null}, "macro.dbt_date.bigquery__get_base_dates": {"name": "bigquery__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": 
"macro.dbt_date.bigquery__get_base_dates", "macro_sql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as datetime )\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as datetime )\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.435981, "supported_languages": null}, "macro.dbt_date.trino__get_base_dates": {"name": "trino__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.trino__get_base_dates", "macro_sql": "{% macro trino__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.now()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.now", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4366689, "supported_languages": null}, "macro.dbt_date.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4373639, "supported_languages": null}, "macro.dbt_date.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, 
end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4379148, "supported_languages": null}, "macro.dbt_date.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.438137, "supported_languages": null}, "macro.dbt_date.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{\n dbt_date.generate_series(\n dbt_date.get_intervals_between(start_date, end_date, datepart)\n )\n }}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"(row_number() over (order by 1) - 1)\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.generate_series", "macro.dbt_date.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.438486, "supported_languages": null}, "macro.dbt_date.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.439306, "supported_languages": null}, "macro.dbt_date.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": 
"macro.dbt_date.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4396899, "supported_languages": null}, "macro.dbt_date.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4398699, "supported_languages": null}, "macro.dbt_date.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.440354, "supported_languages": null}, "macro.dbt_date.date": {"name": "date", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(modules.datetime.date(year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.44068, "supported_languages": null}, "macro.dbt_date.datetime": {"name": "datetime", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.datetime", "macro_sql": "{% macro datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tz=None) %}\n {% set tz = tz if tz else var(\"dbt_date:time_zone\") %}\n {{ return(\n modules.datetime.datetime(\n year=year, month=month, day=day, hour=hour,\n minute=minute, second=second, microsecond=microsecond,\n tzinfo=modules.pytz.timezone(tz)\n )\n ) 
}}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4412, "supported_languages": null}, "macro.dbt_date.get_fiscal_year_dates": {"name": "get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.get_fiscal_year_dates", "macro_sql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_fiscal_year_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4430232, "supported_languages": null}, "macro.dbt_date.default__get_fiscal_year_dates": {"name": "default__get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.default__get_fiscal_year_dates", "macro_sql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\n-- this gets all the dates within a fiscal year\n-- determined by the given year-end-month\n-- ending on the saturday closest to that month's end date\nwith fsc_date_dimension as (\n select * from {{ dates }}\n),\nyear_month_end as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.month_end_date\n from\n fsc_date_dimension d\n where\n d.month_of_year = {{ year_end_month }}\n group by 1,2\n\n),\nweeks as (\n\n select\n d.year_number,\n d.month_of_year,\n d.date_day as week_start_date,\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\n from\n fsc_date_dimension d\n where\n d.day_of_week = {{ week_start_day }}\n\n),\n-- get all the weeks that start in the month the year ends\nyear_week_ends as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.week_end_date\n from\n weeks d\n where\n d.month_of_year = {{ year_end_month }}\n group by\n 1,2\n\n),\n-- then calculate which Saturday is closest to month end\nweeks_at_month_end as (\n\n select\n d.fiscal_year_number,\n d.week_end_date,\n m.month_end_date,\n rank() over\n (partition by d.fiscal_year_number\n order by\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\n\n ) as closest_to_month_end\n from\n year_week_ends d\n join\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\n),\nfiscal_year_range as (\n\n select\n w.fiscal_year_number,\n cast(\n {{ dbt.dateadd('day', 1,\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\n as date) as fiscal_year_start_date,\n w.week_end_date as fiscal_year_end_date\n from\n weeks_at_month_end w\n where\n w.closest_to_month_end = 1\n\n),\nfiscal_year_dates as (\n\n select\n d.date_day,\n m.fiscal_year_number,\n m.fiscal_year_start_date,\n m.fiscal_year_end_date,\n w.week_start_date,\n w.week_end_date,\n -- we reset the weeks of the year starting with the merch year start date\n dense_rank()\n over(\n partition by m.fiscal_year_number\n order by w.week_start_date\n ) as fiscal_week_of_year\n from\n fsc_date_dimension d\n join\n fiscal_year_range m on d.date_day 
between m.fiscal_year_start_date and m.fiscal_year_end_date\n join\n weeks w on d.date_day between w.week_start_date and w.week_end_date\n\n)\nselect * from fiscal_year_dates order by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4435909, "supported_languages": null}, "macro.dbt_date.get_fiscal_periods": {"name": "get_fiscal_periods", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_periods.sql", "original_file_path": "macros/fiscal_date/get_fiscal_periods.sql", "unique_id": "macro.dbt_date.get_fiscal_periods", "macro_sql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\n{#\nThis macro requires you to pass in a ref to a date dimension, created via\ndbt_date.get_date_dimension()s\n#}\nwith fscl_year_dates_for_periods as (\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\n),\nfscl_year_w13 as (\n\n select\n f.*,\n -- We count the weeks in a 13 week period\n -- and separate the 4-5-4 week sequences\n mod(cast(\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\n ), 13) as w13_number,\n -- Chop weeks into 13 week merch quarters\n cast(\n least(\n floor((f.fiscal_week_of_year-1)/13.0)\n , 3)\n as {{ dbt.type_int() }}) as quarter_number\n from\n fscl_year_dates_for_periods f\n\n),\nfscl_periods as (\n\n select\n f.date_day,\n f.fiscal_year_number,\n f.week_start_date,\n f.week_end_date,\n f.fiscal_week_of_year,\n case\n -- we move week 53 into the 3rd period of the quarter\n when f.fiscal_week_of_year = 53 then 3\n when f.w13_number between 0 and 3 then 1\n when f.w13_number between 4 and 8 then 2\n when f.w13_number between 9 and 12 then 3\n end as period_of_quarter,\n f.quarter_number\n from\n fscl_year_w13 f\n\n),\nfscl_periods_quarters as (\n\n select\n f.*,\n cast((\n (f.quarter_number * 3) + f.period_of_quarter\n ) as {{ dbt.type_int() }}) as fiscal_period_number\n from\n fscl_periods f\n\n)\nselect\n date_day,\n fiscal_year_number,\n week_start_date,\n week_end_date,\n fiscal_week_of_year,\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\n fiscal_period_number,\n quarter_number+1 as fiscal_quarter_number,\n period_of_quarter as fiscal_period_of_quarter\nfrom\n fscl_periods_quarters\norder by 1,2\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_fiscal_year_dates", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.444541, "supported_languages": null}, "macro.dbt_date.tomorrow": {"name": "tomorrow", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/tomorrow.sql", "original_file_path": "macros/calendar_date/tomorrow.sql", "unique_id": "macro.dbt_date.tomorrow", "macro_sql": "{%- macro tomorrow(date=None, tz=None) -%}\n{{ dbt_date.n_days_away(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.444752, "supported_languages": null}, "macro.dbt_date.next_week": {"name": "next_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_week.sql", "original_file_path": 
"macros/calendar_date/next_week.sql", "unique_id": "macro.dbt_date.next_week", "macro_sql": "{%- macro next_week(tz=None) -%}\n{{ dbt_date.n_weeks_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.444926, "supported_languages": null}, "macro.dbt_date.next_month_name": {"name": "next_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_name.sql", "original_file_path": "macros/calendar_date/next_month_name.sql", "unique_id": "macro.dbt_date.next_month_name", "macro_sql": "{%- macro next_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.445218, "supported_languages": null}, "macro.dbt_date.next_month": {"name": "next_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month.sql", "original_file_path": "macros/calendar_date/next_month.sql", "unique_id": "macro.dbt_date.next_month", "macro_sql": "{%- macro next_month(tz=None) -%}\n{{ dbt_date.n_months_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4453928, "supported_languages": null}, "macro.dbt_date.day_name": {"name": "day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.day_name", "macro_sql": "{%- macro day_name(date, short=True) -%}\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.44622, "supported_languages": null}, "macro.dbt_date.default__day_name": {"name": "default__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.default__day_name", "macro_sql": "\n\n{%- macro default__day_name(date, short) -%}\n{%- set f = 'Dy' if short else 'Day' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.446412, "supported_languages": null}, "macro.dbt_date.snowflake__day_name": {"name": "snowflake__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.snowflake__day_name", "macro_sql": "\n\n{%- macro snowflake__day_name(date, short) -%}\n {%- if short -%}\n dayname({{ date }})\n {%- else -%}\n -- long version not implemented on Snowflake so we're doing it manually :/\n case dayname({{ date }})\n when 'Mon' then 'Monday'\n when 'Tue' then 'Tuesday'\n when 'Wed' then 'Wednesday'\n 
when 'Thu' then 'Thursday'\n when 'Fri' then 'Friday'\n when 'Sat' then 'Saturday'\n when 'Sun' then 'Sunday'\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.446612, "supported_languages": null}, "macro.dbt_date.bigquery__day_name": {"name": "bigquery__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.bigquery__day_name", "macro_sql": "\n\n{%- macro bigquery__day_name(date, short) -%}\n{%- set f = '%a' if short else '%A' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.446801, "supported_languages": null}, "macro.dbt_date.postgres__day_name": {"name": "postgres__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.postgres__day_name", "macro_sql": "\n\n{%- macro postgres__day_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMDy' if short else 'FMDay' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.446997, "supported_languages": null}, "macro.dbt_date.duckdb__day_name": {"name": "duckdb__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.duckdb__day_name", "macro_sql": "\n\n{%- macro duckdb__day_name(date, short) -%}\n {%- if short -%}\n substr(dayname({{ date }}), 1, 3)\n {%- else -%}\n dayname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.447175, "supported_languages": null}, "macro.dbt_date.spark__day_name": {"name": "spark__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.spark__day_name", "macro_sql": "\n\n{%- macro spark__day_name(date, short) -%}\n{%- set f = 'E' if short else 'EEEE' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.447367, "supported_languages": null}, "macro.dbt_date.trino__day_name": {"name": "trino__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.trino__day_name", "macro_sql": "\n\n{%- macro trino__day_name(date, short) -%}\n{%- set f = 'a' if short else 'W' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1725387302.447561, "supported_languages": null}, "macro.dbt_date.to_unixtimestamp": {"name": "to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.to_unixtimestamp", "macro_sql": "{%- macro to_unixtimestamp(timestamp) -%}\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__to_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4479501, "supported_languages": null}, "macro.dbt_date.default__to_unixtimestamp": {"name": "default__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__to_unixtimestamp", "macro_sql": "\n\n{%- macro default__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.448083, "supported_languages": null}, "macro.dbt_date.snowflake__to_unixtimestamp": {"name": "snowflake__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__to_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.448213, "supported_languages": null}, "macro.dbt_date.bigquery__to_unixtimestamp": {"name": "bigquery__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__to_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\n unix_seconds({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.448313, "supported_languages": null}, "macro.dbt_date.spark__to_unixtimestamp": {"name": "spark__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.spark__to_unixtimestamp", "macro_sql": "\n\n{%- macro spark__to_unixtimestamp(timestamp) -%}\n unix_timestamp({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.448413, "supported_languages": null}, "macro.dbt_date.trino__to_unixtimestamp": {"name": "trino__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": 
"macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__to_unixtimestamp", "macro_sql": "\n\n{%- macro trino__to_unixtimestamp(timestamp) -%}\n to_unixtime({{ timestamp }} AT TIME ZONE 'UTC')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.44851, "supported_languages": null}, "macro.dbt_date.n_days_away": {"name": "n_days_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_away.sql", "original_file_path": "macros/calendar_date/n_days_away.sql", "unique_id": "macro.dbt_date.n_days_away", "macro_sql": "{%- macro n_days_away(n, date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.448737, "supported_languages": null}, "macro.dbt_date.week_start": {"name": "week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.week_start", "macro_sql": "{%- macro week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.449213, "supported_languages": null}, "macro.dbt_date.default__week_start": {"name": "default__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.default__week_start", "macro_sql": "{%- macro default__week_start(date) -%}\ncast({{ dbt.date_trunc('week', date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.449354, "supported_languages": null}, "macro.dbt_date.snowflake__week_start": {"name": "snowflake__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.snowflake__week_start", "macro_sql": "\n\n{%- macro snowflake__week_start(date) -%}\n {#\n Get the day of week offset: e.g. 
if the date is a Sunday,\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\n #}\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.day_of_week", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.449694, "supported_languages": null}, "macro.dbt_date.postgres__week_start": {"name": "postgres__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.postgres__week_start", "macro_sql": "\n\n{%- macro postgres__week_start(date) -%}\n-- Sunday as week start date\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.449942, "supported_languages": null}, "macro.dbt_date.duckdb__week_start": {"name": "duckdb__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.duckdb__week_start", "macro_sql": "\n\n{%- macro duckdb__week_start(date) -%}\n{{ return(dbt_date.postgres__week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4500842, "supported_languages": null}, "macro.dbt_date.iso_week_start": {"name": "iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.iso_week_start", "macro_sql": "{%- macro iso_week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.450721, "supported_languages": null}, "macro.dbt_date._iso_week_start": {"name": "_iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date._iso_week_start", "macro_sql": "{%- macro _iso_week_start(date, week_type) -%}\ncast({{ dbt.date_trunc(week_type, date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.450871, "supported_languages": null}, "macro.dbt_date.default__iso_week_start": {"name": "default__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", 
"unique_id": "macro.dbt_date.default__iso_week_start", "macro_sql": "\n\n{%- macro default__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.451005, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_start": {"name": "snowflake__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_start", "macro_sql": "\n\n{%- macro snowflake__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.451137, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_start": {"name": "postgres__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.postgres__iso_week_start", "macro_sql": "\n\n{%- macro postgres__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4512682, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_start": {"name": "duckdb__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_start", "macro_sql": "\n\n{%- macro duckdb__iso_week_start(date) -%}\n{{ return(dbt_date.postgres__iso_week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.451403, "supported_languages": null}, "macro.dbt_date.spark__iso_week_start": {"name": "spark__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.spark__iso_week_start", "macro_sql": "\n\n{%- macro spark__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.45154, "supported_languages": null}, "macro.dbt_date.trino__iso_week_start": {"name": "trino__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.trino__iso_week_start", "macro_sql": "\n\n{%- macro trino__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.451673, "supported_languages": null}, "macro.dbt_date.n_days_ago": {"name": "n_days_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_ago.sql", "original_file_path": "macros/calendar_date/n_days_ago.sql", "unique_id": "macro.dbt_date.n_days_ago", "macro_sql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{%- set n = n|int -%}\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4520571, "supported_languages": null}, "macro.dbt_date.last_week": {"name": "last_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_week.sql", "original_file_path": "macros/calendar_date/last_week.sql", "unique_id": "macro.dbt_date.last_week", "macro_sql": "{%- macro last_week(tz=None) -%}\n{{ dbt_date.n_weeks_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.452231, "supported_languages": null}, "macro.dbt_date.now": {"name": "now", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/now.sql", "original_file_path": "macros/calendar_date/now.sql", "unique_id": "macro.dbt_date.now", "macro_sql": "{%- macro now(tz=None) -%}\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.convert_timezone", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.452418, "supported_languages": null}, "macro.dbt_date.periods_since": {"name": "periods_since", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/periods_since.sql", "original_file_path": "macros/calendar_date/periods_since.sql", "unique_id": "macro.dbt_date.periods_since", "macro_sql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.452663, "supported_languages": null}, "macro.dbt_date.today": {"name": "today", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/today.sql", "original_file_path": "macros/calendar_date/today.sql", "unique_id": "macro.dbt_date.today", "macro_sql": "{%- macro today(tz=None) -%}\ncast({{ dbt_date.now(tz) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.452825, "supported_languages": null}, "macro.dbt_date.last_month": {"name": "last_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month.sql", "original_file_path": 
"macros/calendar_date/last_month.sql", "unique_id": "macro.dbt_date.last_month", "macro_sql": "{%- macro last_month(tz=None) -%}\n{{ dbt_date.n_months_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4529972, "supported_languages": null}, "macro.dbt_date.day_of_year": {"name": "day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.day_of_year", "macro_sql": "{%- macro day_of_year(date) -%}\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.453387, "supported_languages": null}, "macro.dbt_date.default__day_of_year": {"name": "default__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.default__day_of_year", "macro_sql": "\n\n{%- macro default__day_of_year(date) -%}\n {{ dbt_date.date_part('dayofyear', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4535198, "supported_languages": null}, "macro.dbt_date.postgres__day_of_year": {"name": "postgres__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.postgres__day_of_year", "macro_sql": "\n\n{%- macro postgres__day_of_year(date) -%}\n {{ dbt_date.date_part('doy', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.453653, "supported_languages": null}, "macro.dbt_date.redshift__day_of_year": {"name": "redshift__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.redshift__day_of_year", "macro_sql": "\n\n{%- macro redshift__day_of_year(date) -%}\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.453826, "supported_languages": null}, "macro.dbt_date.spark__day_of_year": {"name": "spark__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.spark__day_of_year", "macro_sql": "\n\n{%- macro spark__day_of_year(date) -%}\n dayofyear({{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1725387302.453994, "supported_languages": null}, "macro.dbt_date.trino__day_of_year": {"name": "trino__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.trino__day_of_year", "macro_sql": "\n\n{%- macro trino__day_of_year(date) -%}\n {{ dbt_date.date_part('day_of_year', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.454128, "supported_languages": null}, "macro.dbt_date.round_timestamp": {"name": "round_timestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/round_timestamp.sql", "original_file_path": "macros/calendar_date/round_timestamp.sql", "unique_id": "macro.dbt_date.round_timestamp", "macro_sql": "{% macro round_timestamp(timestamp) %}\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4543529, "supported_languages": null}, "macro.dbt_date.from_unixtimestamp": {"name": "from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.from_unixtimestamp", "macro_sql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__from_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.456969, "supported_languages": null}, "macro.dbt_date.default__from_unixtimestamp": {"name": "default__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__from_unixtimestamp", "macro_sql": "\n\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp({{ epochs }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4572139, "supported_languages": null}, "macro.dbt_date.postgres__from_unixtimestamp": {"name": "postgres__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.postgres__from_unixtimestamp", "macro_sql": "\n\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n 
cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.457462, "supported_languages": null}, "macro.dbt_date.snowflake__from_unixtimestamp": {"name": "snowflake__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__from_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n {%- set scale = 0 -%}\n {%- elif format == \"milliseconds\" -%}\n {%- set scale = 3 -%}\n {%- elif format == \"microseconds\" -%}\n {%- set scale = 6 -%}\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp_ntz({{ epochs }}, {{ scale }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.457906, "supported_languages": null}, "macro.dbt_date.bigquery__from_unixtimestamp": {"name": "bigquery__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__from_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n timestamp_seconds({{ epochs }})\n {%- elif format == \"milliseconds\" -%}\n timestamp_millis({{ epochs }})\n {%- elif format == \"microseconds\" -%}\n timestamp_micros({{ epochs }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.458275, "supported_languages": null}, "macro.dbt_date.trino__from_unixtimestamp": {"name": "trino__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__from_unixtimestamp", "macro_sql": "\n\n{%- macro trino__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n cast(from_unixtime({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"milliseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 6)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"microseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 3)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"nanoseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.458844, "supported_languages": null}, "macro.dbt_date.duckdb__from_unixtimestamp": {"name": "duckdb__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.duckdb__from_unixtimestamp", "macro_sql": "\n\n\n{%- macro duckdb__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4590988, "supported_languages": null}, "macro.dbt_date.n_months_ago": {"name": "n_months_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_ago.sql", "original_file_path": "macros/calendar_date/n_months_ago.sql", "unique_id": "macro.dbt_date.n_months_ago", "macro_sql": "{%- macro n_months_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.45945, "supported_languages": null}, "macro.dbt_date.date_part": {"name": "date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.date_part", "macro_sql": "{% macro date_part(datepart, date) -%}\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.45978, "supported_languages": null}, "macro.dbt_date.default__date_part": {"name": "default__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.default__date_part", "macro_sql": "{% macro default__date_part(datepart, date) -%}\n date_part('{{ datepart }}', {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.45991, "supported_languages": null}, "macro.dbt_date.bigquery__date_part": {"name": "bigquery__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.bigquery__date_part", "macro_sql": "{% macro bigquery__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4600372, 
"supported_languages": null}, "macro.dbt_date.trino__date_part": {"name": "trino__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.trino__date_part", "macro_sql": "{% macro trino__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.460161, "supported_languages": null}, "macro.dbt_date.n_weeks_away": {"name": "n_weeks_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_away.sql", "original_file_path": "macros/calendar_date/n_weeks_away.sql", "unique_id": "macro.dbt_date.n_weeks_away", "macro_sql": "{%- macro n_weeks_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4604878, "supported_languages": null}, "macro.dbt_date.day_of_month": {"name": "day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.day_of_month", "macro_sql": "{%- macro day_of_month(date) -%}\n{{ dbt_date.date_part('day', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.460692, "supported_languages": null}, "macro.dbt_date.redshift__day_of_month": {"name": "redshift__day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.redshift__day_of_month", "macro_sql": "\n\n{%- macro redshift__day_of_month(date) -%}\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4608698, "supported_languages": null}, "macro.dbt_date.yesterday": {"name": "yesterday", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/yesterday.sql", "original_file_path": "macros/calendar_date/yesterday.sql", "unique_id": "macro.dbt_date.yesterday", "macro_sql": "{%- macro yesterday(date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.461073, "supported_languages": null}, "macro.dbt_date.day_of_week": {"name": "day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.day_of_week", "macro_sql": "{%- macro 
day_of_week(date, isoweek=true) -%}\n{{ adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.46387, "supported_languages": null}, "macro.dbt_date.default__day_of_week": {"name": "default__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.default__day_of_week", "macro_sql": "\n\n{%- macro default__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else {{ dow }}\n end\n {%- else -%}\n {{ dow }} + 1\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.464146, "supported_languages": null}, "macro.dbt_date.snowflake__day_of_week": {"name": "snowflake__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.snowflake__day_of_week", "macro_sql": "\n\n{%- macro snowflake__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'dayofweekiso' -%}\n {{ dbt_date.date_part(dow_part, date) }}\n {%- else -%}\n {%- set dow_part = 'dayofweek' -%}\n case\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\n else {{ dbt_date.date_part(dow_part, date) }} + 1\n end\n {%- endif -%}\n\n\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.464532, "supported_languages": null}, "macro.dbt_date.bigquery__day_of_week": {"name": "bigquery__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.bigquery__day_of_week", "macro_sql": "\n\n{%- macro bigquery__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (1) to Monday (2)\n when {{ dow }} = 1 then 7\n else {{ dow }} - 1\n end\n {%- else -%}\n {{ dow }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4648051, "supported_languages": null}, "macro.dbt_date.postgres__day_of_week": {"name": "postgres__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.postgres__day_of_week", "macro_sql": "\n\n\n{%- macro postgres__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'isodow' -%}\n -- Monday(1) to Sunday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\n {%- else -%}\n {%- set dow_part = 'dow' 
-%}\n -- Sunday(1) to Saturday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.465204, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4655502, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4657102, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.465913, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.466181, "supported_languages": null}, 
"macro.dbt_date.iso_week_end": {"name": "iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.46661, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4668071, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.466943, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4670742, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.467413, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4680681, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.468261, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.468447, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4686341, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.468826, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro 
duckdb__month_name(date, short) -%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.469008, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.46926, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4694521, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.469684, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.470113, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4702878, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.470433, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4705322, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4717681, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.471961, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": 
"macro.dbt_date.bigquery__convert_timezone", "macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4721122, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.472333, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.472519, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4726982, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.472854, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.473073, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4734, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.474103, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.474282, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.474427, "supported_languages": 
null}, "macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4745631, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4747689, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.474905, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4750369, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.475172, "supported_languages": null}, 
"macro.dbt_date.week_end": {"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.475651, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.475779, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.475969, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.47616, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.476295, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": 
"macros/calendar_date/next_month_number.sql", "unique_id": "macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4764988, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.476702, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4770718, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.478007, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4781692, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": 
"{% macro redshift__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.478328, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.47849, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4786298, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.478789, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1725387302.479619, "supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.480234, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4816298, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.481877, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.482122, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not 
loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.482363, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4826071, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4828732, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.48314, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.483374, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1725387302.483707, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.483815, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.483924, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4840288, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.484395, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.485146, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", 
"original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.486232, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.486815, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.487605, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) 
}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.488108, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.488245, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.488375, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.488509, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.488647, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.491751, 
"supported_languages": null}, "macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.491915, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.492077, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.492235, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', 
({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.493963, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.494942, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.495083, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.495351, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", 
"original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.495637, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4957702, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.495896, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.496022, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4961522, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 
'fivetran_utils') (union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4966452, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.497209, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.497727, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.497947, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} 
ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.498177, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.4984431, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.499943, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.504143, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.504503, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5048609, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.506405, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.506933, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.507492, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.507647, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.507797, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5079622, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.508111, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5082529, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.50927, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5369642, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.537741, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.537915, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.538078, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.538239, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.538402, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5385711, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.538829, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.538935, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.539037, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.539671, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.541038, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.541209, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.54203, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.545598, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.550014, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.551372, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.55171, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.551859, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.552052, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.postgres__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.552383, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.552499, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.552609, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.552709, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", 
"unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5528069, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.553057, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5531578, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5532548, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.553658, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(False) 
}}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.554049, "supported_languages": null}, "macro.zendesk.coalesce_cast": {"name": "coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.coalesce_cast", "macro_sql": "{% macro coalesce_cast(column_list, datatype) -%}\n {{ return(adapter.dispatch('coalesce_cast', 'zendesk')(column_list, datatype)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__coalesce_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.554374, "supported_languages": null}, "macro.zendesk.default__coalesce_cast": {"name": "default__coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.default__coalesce_cast", "macro_sql": "{% macro default__coalesce_cast(column_list, datatype) %}\n coalesce(\n {%- for column in column_list %}\n cast({{ column }} as {{ datatype }})\n {%- if not loop.last -%},{%- endif -%}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.554615, "supported_languages": null}, "macro.zendesk.count_tokens": {"name": "count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.count_tokens", "macro_sql": "{% macro count_tokens(column_name) -%}\n {{ return(adapter.dispatch('count_tokens', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__count_tokens"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.554852, "supported_languages": null}, "macro.zendesk.default__count_tokens": {"name": "default__count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.default__count_tokens", "macro_sql": "{% macro default__count_tokens(column_name) %}\n {{ dbt.length(column_name) }} / 4\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.55498, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", 
"macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.555616, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.556638, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.557632, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n 
{\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.558738, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.559463, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.559924, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": {"name": "get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.560937, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5619452, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5627599, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.563568, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.56706, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": "macros/get_ticket_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": 
\"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.57192, "supported_languages": null}, "macro.zendesk_source.get_ticket_field_history_columns": {"name": "get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": 
"macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5726788, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.573229, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_tickets\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.574593, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": 
"zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.5766358, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_file_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) 
}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1725387302.57877, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.954248, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "postgres", "schema": 
"zz_zendesk_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "b30a06ff7e3d392b2fdfa6b5f34633f6c7f8e018e31eef64fcdf2eeaffcae18a"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.971228, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\ndev as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\nfinal as (\n select \n prod.ticket_id as prod_ticket_id,\n dev.ticket_id as dev_ticket_id,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "bdad5490a4a975665c4b658101726f92c08755dd96f6372d8606b47e60fe29d4"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": 
"", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.974726, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select \n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nprod_not_in_dev as (\n -- rows from prod not found in dev\n select * from prod\n except distinct\n select * from dev\n),\n\ndev_not_in_prod as (\n -- rows from dev not found in prod\n select * from dev\n except distinct\n select * from prod\n),\n\nfinal as (\n select\n *,\n 'from prod' as source\n from prod_not_in_dev\n\n union all -- union since we only care if rows are produced\n\n select\n *,\n 'from dev' as source\n from dev_not_in_prod\n)\n\nselect *\nfrom final\n{{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.metrics_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "metrics_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/metrics_count_match.sql", "original_file_path": "tests/integrity/metrics_count_match.sql", "unique_id": "test.zendesk_integration_tests.metrics_count_match", "fqn": ["zendesk_integration_tests", "integrity", "metrics_count_match"], "alias": "metrics_count_match", "checksum": {"name": "sha256", "checksum": "53ccabe8ccd33f6f141fd44f764ef58c91077e0c8ea501be1fb9b8d084d7aac0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.977865, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- check that all the tickets are accounted for in the metrics\nwith stg_count as (\n select\n count(*) as stg_ticket_count\n from {{ ref('stg_zendesk__ticket') }}\n),\n\nmetric_count as (\n select\n count(*) as metric_ticket_count\n from source\n from {{ ref('zendesk__ticket_metrics') }}\n)\n\nselect *\nfrom 
stg_count\njoin metric_count\n on stg_ticket_count != metric_ticket_count", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_metrics_parity": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_metrics_parity", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_metrics_parity.sql", "original_file_path": "tests/integrity/sla_metrics_parity.sql", "unique_id": "test.zendesk_integration_tests.sla_metrics_parity", "fqn": ["zendesk_integration_tests", "integrity", "sla_metrics_parity"], "alias": "sla_metrics_parity", "checksum": {"name": "sha256", "checksum": "d18407ef45d1ce6b2d4eeaca9286dfb8b3b1db85021e3fd69701fb0c33138675"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.980715, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n/*\nThis test is to ensure the sla_elapsed_time from zendesk__sla_policies matches the corresponding time in zendesk__ticket_metrics.\n*/\n\nwith dev_slas as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n where in_business_hours\n\n), dev_metrics as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n\n), dev_compare as (\n select \n dev_slas.ticket_id,\n dev_slas.metric,\n cast(dev_slas.sla_elapsed_time as {{ dbt.type_int() }}) as time_from_slas,\n case when metric = 'agent_work_time' then dev_metrics.agent_work_time_in_business_minutes\n when metric = 'requester_wait_time' then dev_metrics.requester_wait_time_in_business_minutes\n when metric = 'first_reply_time' then dev_metrics.first_reply_time_business_minutes\n end as time_from_metrics\n from dev_slas\n left join dev_metrics\n on dev_metrics.ticket_id = dev_slas.ticket_id\n)\n\nselect *\nfrom dev_compare\nwhere abs(time_from_slas - time_from_metrics) >= 5\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": 
"test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.985549, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n ticket_metrics.first_reply_time_business_minutes,\n sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1725387302.988725, "relation_name": null, "raw_code": "{{ config(\n 
tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1725387303.0606458, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], 
"seed.zendesk_integration_tests.brand_data": [{"database": "postgres", "schema": "zz_zendesk", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1725387303.067225, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' 
}}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1725387303.069661, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1725387303.072028, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__requester_updates", 
"model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", 
"model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", "source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__calendar_spine": ["source.zendesk_source.zendesk.ticket"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.zendesk__document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped", "model.zendesk.int_zendesk__ticket_document"], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.int_zendesk__ticket_comment_document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", 
"model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], 
"model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], 
"test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], "source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": [], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": 
["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.zendesk__document": [], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__ticket_comment_document": 
["model.zendesk.int_zendesk__ticket_comment_documents_grouped"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], 
"model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", 
"model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], 
"test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "source.zendesk_source.zendesk.ticket": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}, "unit_tests": {}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T16:53:07.142891Z", "invocation_id": "ee1cfc0d-443e-4374-ad8a-25dc360a3746", "env": {}, "project_name": "zendesk_integration_tests", "project_id": "b8a12ac1bacdf035438fc7646299ce11", "user_id": "8268eefe-e8f7-472e-ab2a-a92f0135d76d", "send_anonymous_usage_stats": true, "adapter_type": "postgres"}, "nodes": {"seed.zendesk_integration_tests.organization_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": 
"adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.5350242, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "user_id": "bigint", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1728492760.536248, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", 
"unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "schedule_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728492760.538736, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", "unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5416782, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": 
"ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "47c9244103b9a8dc25c5ce75693b8389df92258dde23dae71a09f021cf1b7ab7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "user_id": "bigint", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1728492760.544234, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.audit_log_data": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "audit_log_data.csv", "original_file_path": "seeds/audit_log_data.csv", "unique_id": "seed.zendesk_integration_tests.audit_log_data", "fqn": ["zendesk_integration_tests", "audit_log_data"], "alias": "audit_log_data", "checksum": {"name": "sha256", "checksum": "9979d1f37155833b5af3a3de6d9bcca8ac3143b8ecd59e32efca95a1b8e44b10"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5455658, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", 
"path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "effe2837ec0ff3ec59fddc7fce0a5f4a6ff0a69daef5ae904244dcbf34425dae"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "bigint", "brand_id": "bigint", "external_id": "bigint", "forum_topic_id": "bigint", "group_id": "bigint", "organization_id": "bigint", "problem_id": "bigint", "requester_id": "bigint", "submitter_id": "bigint", "ticket_form_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728492760.54765, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data_postgres": {"database": "postgres", "schema": "zz_zendesk", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", "brand_data_postgres"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": true, "alias": "brand_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": 
null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1728492760.5490701, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.550294, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": "ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "schedule_id": "bigint", "created_at": "timestamp"}, "full_refresh": null, 
"unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728492760.551784, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5531719, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "9f600c24b84ed0183e88c5aaa4e7e02bd2228115bebc85217f04c97bd5b6dbc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": 
"timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.554456, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "end_time": "bigint", "start_time": "bigint", "end_time_utc": "bigint", "start_time_utc": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728492760.555796, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag_data", 
"resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.556999, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728492760.55834, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": 
"zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": "a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1728492760.559668, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "postgres", "schema": "zz_zendesk", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728492760.561072, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data", "resource_type": "seed", 
"package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.562429, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, 
"constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [],
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.572698, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n select *\n from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n \n select *\n from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n latest_ticket_form.name as ticket_form_name,\n {% endif %}\n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n {% endif %}\n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n 
requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": 
null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n 
requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": "71977e3eeb4ea80a2beb205ad3dde4fb9aac17cb8391df9c39b854e658d124cb"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": "agent_wait_time_in_business_minutes", "description": "The combined number of business minutes 
the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []},
"assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [],
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate 
whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.5869162, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 
'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n 
ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", 
"model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join 
ticket_historical_assignee\n    using(ticket_id)\n\n  left join ticket_historical_group\n    using(ticket_id)\n\n  left join solved_times\n    using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n  where field_name = 'comment'\n\n), users as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n  select \n\n    ticket_comment.*,\n    case when commenter.role = 'end-user' then 'external_comment'\n      when commenter.role in ('agent','admin') then 'internal_comment'\n      else 'unknown' end as commenter_role\n  \n  from ticket_comment\n  \n  join users as commenter\n    on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n  /*\n  In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n  The below union explicitly identifies the previous commenter roles of public and non-public comments.\n  */\n  select\n    *,\n    coalesce(\n      lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n      , 'first_comment') \n      as previous_commenter_role\n  from joined\n  where is_public\n\n  union all\n\n  select\n    *,\n    'non_public_comment' as previous_commenter_role\n  from joined\n  where not is_public\n)\n\nselect \n  *,\n  first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n  sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n    select *\n    from __dbt__cte__int_zendesk__comments_enriched\n    where is_public\n\n), end_user_comments as (\n  \n  select \n    ticket_id,\n    valid_starting_at as end_user_comment_created_at,\n    ticket_created_date,\n    commenter_role,\n    previous_internal_comment_count,\n    previous_commenter_role = 'first_comment' as is_first_comment\n  from ticket_public_comments \n  where (commenter_role = 'external_comment'\n    and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n    or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as (  \n\n  select\n    end_user_comments.ticket_id,\n    -- If the commenter was internal, a first comment, and had previous non-public internal comments then we want the ticket created date to be the end user comment created date\n    -- Otherwise we will want the end user comment created date\n    case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n    end_user_comments.is_first_comment,\n    min(case when is_first_comment \n          and end_user_comments.commenter_role != 'external_comment' \n          and (end_user_comments.previous_internal_comment_count > 0)\n        then end_user_comments.end_user_comment_created_at \n        else agent_comments.valid_starting_at end) as agent_responded_at\n  from end_user_comments\n  left join ticket_public_comments as agent_comments\n    on agent_comments.ticket_id = end_user_comments.ticket_id\n    and agent_comments.commenter_role = 'internal_comment'\n    and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n  group by 
1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith 
ticket_resolution_times_calendar as (\n\n  select *\n  from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n  select \n    ticket_resolution_times_calendar.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n     * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n     * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n     * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n    /60\n    ) as start_time_in_minutes_from_week,\n    greatest(0,\n    (\n    \n    (\n    (\n    (\n        ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n     * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n    /60\n    )) as raw_delta_in_minutes,\n    -- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n  \n  from ticket_resolution_times_calendar\n  join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n  group by 1, 2, 3, 4\n\n), weeks as (\n\n    \n\n    \n\n    with p as (\n        select 0 as generated_number union all select 1\n    ), unioned as (\n\n    select\n\n    \n    p0.generated_number * power(2, 0)\n     + \n    \n    p1.generated_number * power(2, 1)\n     + 
\n    \n    p2.generated_number * power(2, 2)\n     + \n    \n    p3.generated_number * power(2, 3)\n     + \n    \n    p4.generated_number * power(2, 4)\n     + \n    \n    p5.generated_number * power(2, 5)\n    \n    \n     + 1\n    as generated_number\n\n    from\n\n    \n    p as p0\n     cross join \n    \n    p as p1\n     cross join \n    \n    p as p2\n     cross join \n    \n    p as p3\n     cross join \n    \n    p as p4\n     cross join \n    \n    p as p5\n    \n    \n\n    )\n\n    select *\n    from unioned\n    where generated_number <= 52\n    order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n    -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n    select \n\n    ticket_first_resolution_time.*,\n    cast(generated_number - 1 as integer) as week_number\n\n    from ticket_first_resolution_time\n    cross join weeks\n    where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n  \n  select \n\n    weeks_cross_ticket_first_resolution_time.*,\n    cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n    cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n  \n  from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n  select \n    ticket_id,\n    week_number,\n    weekly_periods.schedule_id,\n    ticket_week_start_time,\n    ticket_week_end_time,\n    schedule.start_time_utc as schedule_start_time,\n    schedule.end_time_utc as schedule_end_time,\n    least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n  from weekly_periods\n  join schedule\n    on ticket_week_start_time <= schedule.end_time_utc \n    and ticket_week_end_time >= schedule.start_time_utc\n    and weekly_periods.schedule_id = schedule.schedule_id\n    -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n    -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n  select \n    ticket_id,\n    sum(scheduled_minutes) as first_resolution_business_minutes\n  from intercepted_periods\n  group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n  select *\n  from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n  select \n    ticket_resolution_times_calendar.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- 
Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where 
floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n  \n  select \n\n    weeks_cross_ticket_full_resolution_time.*,\n    cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n    cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n  \n  from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n  select \n    ticket_id,\n    week_number,\n    weekly_periods.schedule_id,\n    ticket_week_start_time,\n    ticket_week_end_time,\n    schedule.start_time_utc as schedule_start_time,\n    schedule.end_time_utc as schedule_end_time,\n    least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n  from weekly_periods\n  join schedule on ticket_week_start_time <= schedule.end_time_utc \n    and ticket_week_end_time >= schedule.start_time_utc\n    and weekly_periods.schedule_id = schedule.schedule_id\n    -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n    -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n    and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n    \n)\n\n  select \n    ticket_id,\n    sum(scheduled_minutes) as full_resolution_business_minutes\n  from intercepted_periods\n  group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n  \n  select\n    ticket_historical_status.ticket_id,\n    ticket_historical_status.status as ticket_status,\n    ticket_schedules.schedule_id,\n\n    -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n    greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n    least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n    -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n    ticket_historical_status.valid_starting_at as status_valid_starting_at,\n    ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n  from ticket_historical_status\n  left join ticket_schedules\n    on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n  -- making sure there is indeed real overlap\n  where \n    (\n    (\n    (\n        ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n     * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n     * 60 + date_part('minute', (least(valid_ending_at, 
schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n 
p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as 
solve_time_in_business_minutes,\n    sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n    sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n    sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n    sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n  from business_minutes\n  group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n  select *\n  from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n  select \n    *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n  select *\n  from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n  select\n    ticket_id,\n    end_user_comment_created_at,\n    agent_responded_at\n\n  from ticket_reply_times\n  where is_first_comment\n\n), ticket_first_reply_time as (\n\n  select \n    first_reply_time.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n     * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n     * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n     * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n    /60\n    ) as start_time_in_minutes_from_week,\n    greatest(0,\n    (\n    \n    (\n    (\n    (\n        ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n     * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n    /60\n    )) as raw_delta_in_minutes,\n    -- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments 
as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n 
ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join 
ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n 
group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies 
the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard 
time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into 
calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', 
(cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n 
week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_business", "sql": " __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n 
status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as 
week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "sql": " 
__dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n 
select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": 
true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.591616, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_summary\"", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in 
('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n 
else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": "model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "2fea56dd7631d630021a96594da99a1b65affd7ec6d7a5a913ef3fc0b7759949"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492761.5754502, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if 
is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n 
select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || 
coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n        *\n\n    from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__sla_policies": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "7f12fd205228c0344bec4ae967a46c692bbede3209008a5648f86be4777550ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional SLA breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is an SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier; it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achievement event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.57473, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n  select * \n  from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n  select *\n  from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n  select *\n  from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n  select *\n  from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n  select *\n  from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n  select\n    ticket_id,\n    sla_policy_name,\n    metric,\n    sla_applied_at,\n    target,\n    in_business_hours,\n    sla_update_at as sla_breach_at,\n    sla_elapsed_time,\n    is_sla_breached\n  from reply_time_sla\n\nunion all\n\n  select\n    ticket_id,\n    sla_policy_name,\n    'agent_work_time' as metric,\n    sla_applied_at,\n    target,\n    false as in_business_hours,\n    max(sla_breach_at) as sla_breach_at,\n    max(running_total_calendar_minutes) as sla_elapsed_time,\n    {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n  from agent_work_calendar_sla\n\n  group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n  select\n    ticket_id,\n    sla_policy_name,\n    'requester_wait_time' as metric,\n    sla_applied_at,\n    target,\n    false as in_business_hours,\n    max(sla_breach_at) as sla_breach_at,\n    max(running_total_calendar_minutes) as sla_elapsed_time,\n    {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n  from requester_wait_calendar_sla\n\n  group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all \n\n  select \n    ticket_id,\n    
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n 
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((now())::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > now())\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": "546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, 
"unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.5921931, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_backlog\"", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} 
--Standard ID field where the name can easily be joined from stg model.\n        ,ticket_forms.name as ticket_form_name\n\n        {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n        ,organizations.name as organization_name\n\n        {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n        ,brands.name as brand_name\n\n        {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n        ,group_names.name as group_name\n\n        {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n        ,assignee.locale as local_name\n\n        {% else %} --All other fields are not ID's and can simply be included in the query.\n        ,ticket_field_history.{{ col }}\n        {% endif %}\n        {% endfor %}\n\n    from ticket_field_history\n\n    left join tickets\n        on tickets.ticket_id = ticket_field_history.ticket_id\n\n    {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n    left join ticket_forms\n        on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n    left join group_names\n        on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields are not located in variable, otherwise it is included.\n    left join users as assignee\n        on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n    left join users as requester\n        on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n    left join brands\n        on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n    left join organizations\n        on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n    {% endif %}\n\n    where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", 
"model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n    select\n        ticket_field_history.date_day\n        ,ticket_field_history.ticket_id\n        ,ticket_field_history.status\n        ,tickets.created_channel\n         --Looking at all history fields the users passed through in their dbt_project.yml file\n         --Standard ID field where the name can easily be joined from stg model.\n        ,assignee.name as assignee_name\n\n        \n         --Looking at all history fields the users passed through in their dbt_project.yml file\n         --All other fields are not ID's and can simply be included in the query.\n        ,ticket_field_history.priority\n        \n        \n\n    from ticket_field_history\n\n    left join tickets\n        on tickets.ticket_id = ticket_field_history.ticket_id\n\n    \n\n    \n\n     --Join not needed if fields are not located in variable, otherwise it is included.\n    left join users as assignee\n        on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n    \n\n    \n\n    \n\n    \n\n    where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "e3fdf31f14e332d08049e6ad3a865a8a8776755ada75ddb655a6cc72a61b9d15"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", 
"columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.790267, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: 
https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, now()) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": "sha256", "checksum": "430c95ca8321909d770cb8caae56a0bdc90d91b889969ddcdfb4725b1bc5f903"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, 
"column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.796444, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the 
beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", 
"macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as 
ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as 
(\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.819922, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - 
running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "postgres", 
"schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3d9208f477a6aa3dcf000568e9ca35d8edbdc8c7d47223f34bb1f1aa0f609902"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.8250492, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "12c0706c03db8c187b66676360dc7ae36eb9db9b9c36324366854ec9ca03448d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.829491, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 
'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from week_index_calc\n cross join weeks\n 
where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt_date.week_start('sla_applied_at','UTC') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from 
intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n 
sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 
1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 
\n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": 
"6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.846275, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.8504808, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n 
week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes between sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to within a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is 
null and {{ dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solved_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n 
reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data, we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solved_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.859007, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n 
select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": 
"model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": "int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "5562a77785bebf0f99e2d574f4b762ca5149c3c92245a7e35b345bf3ffb1cb00"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.8642151, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n 
\"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", 
"macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - 
date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, 
valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else 
false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3dcdd6a267ee2ec704192d6e14b7af92ba52316f66389455c5bf3d0c73649188"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.873724, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, 
\"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times"], "alias": "int_zendesk__ticket_reply_times", "checksum": {"name": "sha256", "checksum": "6de1b30f99a9bbd078c823538ca0e87c5b57d33160f65c290ecd67765e8d4472"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.878148, "relation_name": null, "raw_code": "with ticket_public_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to use the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at 
end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n ({{ dbt.datediff(\n 'end_user_comment_created_at',\n 'agent_responded_at',\n 'second') }} / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n 
end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to use the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.int_zendesk__ticket_reply_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times_calendar.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times_calendar"], "alias": "int_zendesk__ticket_reply_times_calendar", "checksum": {"name": "sha256", "checksum": "6fb6a60134019d78fcfc8c135b4a7887b3ce52ec53d8db463194f7824d2c71c2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.881825, "relation_name": null, "raw_code": "with ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_reply_times"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by 
valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to use the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join 
ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and 
agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comments_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comments_enriched", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__comments_enriched.sql", "original_file_path": "models/reply_times/int_zendesk__comments_enriched.sql", "unique_id": "model.zendesk.int_zendesk__comments_enriched", "fqn": ["zendesk", "reply_times", "int_zendesk__comments_enriched"], "alias": "int_zendesk__comments_enriched", "checksum": {"name": "sha256", "checksum": "970004a2aa343ae78a3f810828600c7eca8585428b52b05e4353f9debc6f1af5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.885153, "relation_name": null, "raw_code": "with ticket_comment as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'comment'\n\n), users as (\n\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id 
rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__comments_enriched.sql", "compiled": true, "compiled_code": "with ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_reply_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_reply_time_business", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_first_reply_time_business.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_first_reply_time_business"], "alias": "int_zendesk__ticket_first_reply_time_business", "checksum": {"name": "sha256", "checksum": "0bacc5f74a5eac2a55c2b0bacb1a0b7908783948ad162b84c230be9310dd02b5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, 
"on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.8865888, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n), ticket_schedules as (\n\n select \n *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from 
weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), 
end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * 
(7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 
'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_enriched", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_enriched.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_enriched.sql", "unique_id": "model.zendesk.int_zendesk__field_history_enriched", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_enriched"], "alias": "int_zendesk__field_history_enriched", "checksum": {"name": "sha256", "checksum": "cdf920b1df5fee8c6a08b0e26996028d327964903e8acc4dd15498d23c00005c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.893893, "relation_name": null, "raw_code": "with ticket_field_history as (\n\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), updater_info as (\n select *\n from {{ ref('int_zendesk__updater_information') }}\n\n), final as (\n select\n ticket_field_history.*\n\n {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.\n {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.\n\n --The below statements are needed to populate Zendesk automated fields for when the zendesk triggers automatically change fields based on user defined triggers.\n {% if col in ['updater_is_active'] %}\n ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}\n\n {% elif col in ['updater_user_id','updater_organization_id'] %}\n ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}\n \n {% elif col in ['updater_last_login_at'] %}\n ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}\n \n {% else %}\n ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}\n \n {% endif %}\n {% endfor %}\n {% endif %} \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "int_zendesk__updater_information", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk.int_zendesk__updater_information"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_enriched.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n 
select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_pivot": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_pivot.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_pivot.sql", "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_pivot"], "alias": "int_zendesk__field_history_pivot", "checksum": {"name": "sha256", "checksum": "077bf8d76ba0523c2ebb987be0fd0746acbdae8fdbdd39fc7a03203a5d070f87"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492760.899085, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"", "raw_code": "-- depends_on: {{ source('zendesk', 'ticket_field_history') }}\n\n{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{% if execute -%}\n {% set results = run_query('select distinct field_name from ' ~ source('zendesk', 'ticket_field_history') ) %}\n {% set results_list = results.columns[0].values() %}\n{% endif -%}\n\nwith field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n ,\n {{ var('ticket_field_history_updater_columns') | join (\", \")}}\n\n {% endif %}\n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from {{ ref('int_zendesk__field_history_enriched') }}\n {% if is_incremental() %}\n where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day\n\n {% for col in results_list if col in var('ticket_field_history_columns') %}\n {% set col_xf = col|lower %}\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n\n {% for upd in var('ticket_field_history_updater_columns') %}\n\n {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.\n\n {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}\n\n ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% else %}\n\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% endif %}\n {% endfor %}\n {% endif %}\n {% endfor %}\n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n {{ dbt_utils.generate_surrogate_key(['ticket_id','date_day'])}} as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_enriched", "package": null, "version": null}], "sources": [["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.date_trunc", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history", "model.zendesk.int_zendesk__field_history_enriched"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_pivot.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n 
from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as 
updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__field_history_enriched", "sql": " __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updater_information": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updater_information", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__updater_information.sql", "original_file_path": "models/ticket_history/int_zendesk__updater_information.sql", "unique_id": "model.zendesk.int_zendesk__updater_information", "fqn": ["zendesk", "ticket_history", "int_zendesk__updater_information"], "alias": "int_zendesk__updater_information", "checksum": {"name": "sha256", "checksum": "62a690646cff991c0e0b6e205440a070bb44aab8d4d9286714710c52a4c6677a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.908752, "relation_name": null, "raw_code": "with users as (\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), organizations as (\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,users.user_tags as updater_user_tags\n {% endif %}\n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,organizations.domain_names as updater_organization_domain_names\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,organizations.organization_tags as 
updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.913312, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from 
set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "01739353b5d9fec39fe39ca428ceb43b51a64bd7408d1f4825fcf1d598fb15ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492760.950457, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as (\n\n 
select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then now() else updated_at end) as date) as open_until\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), 
'_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.957124, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as 
on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "9ea4023c98c8bdebaf01445490e058d4766cb32a45db569e01e91fa8eac2e689"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.958596, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), 
ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time 
<= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n 
ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', 
(ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n 
and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "722fbe199f8263916801adf6a6f035c8dc37de056bbd359bd9c42f834b3f3ef3"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.966472, "relation_name": null, "raw_code": "-- depends_on: {{ var('ticket') }}\nwith spine as (\n\n {% if execute and flags.WHICH in ('run', 'build') %}\n\n {%- set first_date_query %}\n select \n coalesce(\n min(cast(created_at as date)), \n cast({{ dbt.dateadd(\"month\", -1, 
\"current_date\") }} as date)\n ) as min_date\n from {{ var('ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ dbt.dateadd('year', \n - var('ticket_field_history_timeframe_years', 50), \"current_date\") }}\n {% endset -%}\n\n {%- set first_date = dbt_utils.get_single_value(first_date_query) %}\n\n {% else %}\n {%- set first_date = '2016-01-01' %}\n\n {% endif %}\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = \"cast('\" ~ first_date ~ \"' as date)\",\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__timezone_daylight": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__timezone_daylight", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__timezone_daylight.sql", "original_file_path": "models/utils/int_zendesk__timezone_daylight.sql", "unique_id": "model.zendesk.int_zendesk__timezone_daylight", "fqn": ["zendesk", "utils", "int_zendesk__timezone_daylight"], "alias": "int_zendesk__timezone_daylight", "checksum": {"name": "sha256", "checksum": 
"021f733ee1abac848fb9d6cfff1c4981f24919f7ff0f59e9c2895654831d9dd8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.9816241, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. 
Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp() }} as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(valid_until as {{ dbt.type_timestamp() }}) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.dateadd", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__timezone_daylight.sql", "compiled": true, "compiled_code": "\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], "alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.9876359, "relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is 
not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - 
date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "92b30d97de3fa5a059b70ef930d731bc7cfeb93a39206970f37ed605264c01af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.993813, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n 
from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', 
from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
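The `intercepted_periods` CTE above is a standard interval-overlap join: a row survives when the ticket's weekly window and a schedule window intersect, and the overlap length is `least` of the ends minus `greatest` of the starts. A self-contained sketch with made-up minute offsets (minutes since the Sunday week start):

```sql
-- Hypothetical windows, all in minutes from Sunday 00:00.
with weekly_period(ticket_week_start_time, ticket_week_end_time) as (
  values (2000, 4000)                      -- ticket active Mon 09:20 .. Tue 18:40
), schedule(schedule_id, start_time_utc, end_time_utc) as (
  values (1, 1980, 2460),                  -- Mon 09:00-17:00
         (1, 3420, 3900)                   -- Tue 09:00-17:00
)
select
  schedule_id,
  least(ticket_week_end_time, end_time_utc)
    - greatest(ticket_week_start_time, start_time_utc) as scheduled_minutes
from weekly_period
join schedule
  on ticket_week_start_time <= end_time_utc
 and ticket_week_end_time >= start_time_utc;
-- 460 minutes on Monday, 480 on Tuesday
```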
(ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + 
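`dbt_date.week_start(..., 'UTC')` compiles to the shift-truncate-shift idiom visible above because Postgres `date_trunc('week', ...)` snaps to Monday while the package counts weeks from Sunday. A one-row check with a hypothetical timestamp:

```sql
-- Tuesday 2024-05-21: +1 day -> Wed, truncate -> Mon 2024-05-20,
-- -1 day -> Sun 2024-05-19, the Sunday-based week start.
select cast(
  date_trunc('week', timestamp '2024-05-21 11:20:00' + interval '1 day')
    - interval '1 day'
  as date) as start_week_date;  -- 2024-05-19
```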
date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < 
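The inlined `weeks` CTE is `dbt_utils.generate_series(52)` after compilation: each self-cross-joined copy of `p` contributes one binary digit, so six copies enumerate 1..64 and the outer filter trims the list to 52. The same trick with the `power(2, n)` terms folded to literals:

```sql
-- Binary-digit cross join: 2^6 = 64 candidates, filtered to 1..52.
with p as (select 0 as generated_number union all select 1),
unioned as (
  select
      p0.generated_number * 1  + p1.generated_number * 2
    + p2.generated_number * 4  + p3.generated_number * 8
    + p4.generated_number * 16 + p5.generated_number * 32
    + 1 as generated_number
  from p as p0
  cross join p as p1 cross join p as p2
  cross join p as p3 cross join p as p4 cross join p as p5
)
select generated_number from unioned
where generated_number <= 52
order by generated_number;
```

Six digits suffice because 2^6 = 64 >= 52; the macro appears to size the digit count from the requested upper bound.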
cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
(ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": "sha256", "checksum": "c14c73bcfcc33dc8bc6a94827770c47f4e70f4608f3227bbbc1f10cbcad4c572"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492761.003202, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id 
= ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), 
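Since every window is measured in minutes from its week's start, a ticket open across multiple calendar weeks must first be fanned out into one row per week; the `where floor((start + delta) / (7*24*60)) >= generated_number - 1` filter above keeps exactly the weeks the ticket's span reaches. A sketch of that fan-out, using the Postgres built-in `generate_series` in place of the macro and hypothetical minute values:

```sql
-- Hypothetical: ticket starts 2000 minutes into its first week and stays
-- open 12000 minutes; a week is 7*24*60 = 10080 minutes.
select generated_number - 1 as week_number
from generate_series(1, 52) as s(generated_number)
where floor((2000 + 12000) / (7*24*60.0)) >= generated_number - 1;
-- week_number 0 and 1: the span ends 14000 minutes in, i.e. in week 2
```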
ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time 
as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n 
from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n 
ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/zendesk__document.sql", "original_file_path": "models/unstructured/zendesk__document.sql", "unique_id": "model.zendesk.zendesk__document", "fqn": ["zendesk", "unstructured", "zendesk__document"], "alias": "zendesk__document", "checksum": {"name": "sha256", "checksum": "0d3d8f2e10bcc679a958386cd5b13f616e17139821263f12c8dddef34c93b21b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": 
false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"document_id": {"name": "document_id", "description": "Equivalent to `ticket_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_index": {"name": "chunk_index", "description": "The index of the chunk associated with the `document_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_tokens_approximate": {"name": "chunk_tokens_approximate", "description": "Approximate number of tokens for the chunk, assuming 4 characters per token.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk": {"name": "chunk", "description": "The text of the chunk.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/unstructured/zendesk_unstructured.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.674829, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_document as (\n select *\n from {{ ref('int_zendesk__ticket_document') }}\n\n), grouped as (\n select *\n from {{ ref('int_zendesk__ticket_comment_documents_grouped') }}\n\n), final as (\n select\n cast(ticket_document.ticket_id as {{ dbt.type_string() }}) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n {{ dbt.concat([\n \"ticket_document.ticket_markdown\",\n \"'\\\\n\\\\n## COMMENTS\\\\n\\\\n'\",\n \"grouped.comments_group_markdown\"]) }}\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__ticket_document", "package": null, "version": null}, {"name": "int_zendesk__ticket_comment_documents_grouped", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.concat"], "nodes": ["model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_comment_documents_grouped"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/zendesk__document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_document as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"\n\n), grouped as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"\n\n), final as (\n select\n cast(ticket_document.ticket_id as TEXT) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n ticket_document.ticket_markdown || '\\n\\n## COMMENTS\\n\\n' || grouped.comments_group_markdown\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": 
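`zendesk__document` above stitches each ticket's header markdown onto each comment chunk and keys the result by `(document_id, chunk_index)`. A toy version with hypothetical rows (an escape string stands in for the compiled newline literals):

```sql
with ticket_document(ticket_id, ticket_markdown) as (
  values (1, '# Ticket : Printer on fire')
), grouped(ticket_id, chunk_index, comments_group_markdown) as (
  values (1, 0, '### message from Ann (ann@example.com)')
)
select
  cast(ticket_document.ticket_id as text) as document_id,
  grouped.chunk_index,
  ticket_document.ticket_markdown
    || E'\n\n## COMMENTS\n\n'
    || grouped.comments_group_markdown as chunk
from ticket_document
join grouped
  on grouped.ticket_id = ticket_document.ticket_id;
```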
{"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_documents_grouped"], "alias": "int_zendesk__ticket_comment_documents_grouped", "checksum": {"name": "sha256", "checksum": "ad03266e19d20396ca75812cb98816f3e11e078c63c30807790903674f4db42b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.014288, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith filtered_comment_documents as (\n select *\n from {{ ref('int_zendesk__ticket_comment_document') }}\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast({{ dbt_utils.safe_divide('floor(cumulative_length - 1)', var('zendesk_max_tokens', 5000)) }} as {{ dbt.type_int() }}) as chunk_index,\n {{ dbt.listagg(\n measure=\"comment_markdown\",\n delimiter_text=\"'\\\\n\\\\n---\\\\n\\\\n'\",\n order_by_clause=\"order by comment_time\"\n ) }} as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_comment_document", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.safe_divide", "macro.dbt.type_int", "macro.dbt.listagg"], "nodes": ["model.zendesk.int_zendesk__ticket_comment_document"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "compiled": true, "compiled_code": "\n\nwith filtered_comment_documents as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast(\n ( 
floor(cumulative_length - 1) ) / nullif( ( 5000 ), 0)\n as integer) as chunk_index,\n \n string_agg(\n comment_markdown,\n '\\n\\n---\\n\\n'\n order by comment_time\n ) as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_document"], "alias": "int_zendesk__ticket_comment_document", "checksum": {"name": "sha256", "checksum": "e75f893dec0ca7599db16793ad9b39bf5d33f463abe6fa4d7be8019e095f45d8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.022666, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_comments as (\n select *\n from {{ var('ticket_comment') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_email,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n {{ dbt.concat([\n \"'### message from '\", \"commenter_name\", \"' ('\", \"commenter_email\", \"')\\\\n'\",\n \"'##### sent @ '\", \"comment_time\", \"'\\\\n'\",\n \"comment_body\"\n ]) }} as {{ dbt.type_string() }})\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n {{ zendesk.count_tokens(\"comment_markdown\") }} as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case 
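The grouping model above assigns a `chunk_index` by running a cumulative token sum over each ticket's comments in time order and dividing by the token budget (`safe_divide` is just division with a `nullif` guard on the denominator). A sketch of the intended bucketing, assuming the 5000-token default and hypothetical counts:

```sql
with comments(ticket_id, comment_time, comment_tokens) as (
  values (1, 1, 3000), (1, 2, 3000), (1, 3, 3000)
), running as (
  select comment_time,
    sum(comment_tokens) over (
      partition by ticket_id
      order by comment_time
      rows between unbounded preceding and current row
    ) as cumulative_length
  from comments
)
select comment_time,
  cast(floor((cumulative_length - 1) / nullif(5000.0, 0)) as integer) as chunk_index
from running;
-- running totals 3000, 6000, 9000 -> chunk_index 0, 1, 1
```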
when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then left(comment_markdown, {{ var('zendesk_max_tokens', 5000) }} * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then {{ var('zendesk_max_tokens', 5000) }}\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_comments as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_email,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n '### message from ' || commenter_name || ' (' || commenter_email || ')\\n' || '##### sent @ ' || comment_time || '\\n' || comment_body as TEXT)\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n \n \n\n length(\n comment_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > 5000 then left(comment_markdown, 5000 * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > 5000 then 5000\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_document"], "alias": "int_zendesk__ticket_document", "checksum": {"name": "sha256", "checksum": 
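Token counts throughout are the ~4-characters-per-token approximation (`length(text) / 4`), and oversized comments are clipped back to the budget in characters. A minimal sketch, assuming the 5000-token default:

```sql
with one_comment(comment_markdown) as (
  values (repeat('x', 30000))              -- hypothetical 30k-char comment
)
select
  length(comment_markdown) / 4 as comment_tokens,     -- ~7500 tokens
  case when length(comment_markdown) / 4 > 5000
       then left(comment_markdown, 5000 * 4)          -- clip to ~5000 tokens
       else comment_markdown
  end as comment_markdown_truncated
from one_comment;
```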
"1fd6807d45c4904ff1ecbc4b929c675ae0b766b40a711641af85cfe4c6cae4ec"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.0344992, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith tickets as (\n select *\n from {{ var('ticket') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as user_name,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as created_by,\n tickets.created_at AS created_on,\n {{ zendesk.coalesce_cast([\"tickets.status\", \"'UNKNOWN'\"], dbt.type_string()) }} as status,\n {{ zendesk.coalesce_cast([\"tickets.priority\", \"'UNKNOWN'\"], dbt.type_string()) }} as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n {{ dbt.concat([\n \"'# Ticket : '\", \"ticket_name\", \"'\\\\n\\\\n'\",\n \"'Created By : '\", \"user_name\", \"' ('\", \"created_by\", \"')\\\\n'\",\n \"'Created On : '\", \"created_on\", \"'\\\\n'\",\n \"'Status : '\", \"status\", \"'\\\\n'\",\n \"'Priority : '\", \"priority\"\n ]) }} as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n {{ zendesk.count_tokens(\"ticket_markdown\") }} as ticket_tokens\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_document.sql", "compiled": true, "compiled_code": "\n\nwith tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as user_name,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as created_by,\n tickets.created_at AS created_on,\n \n coalesce(\n cast(tickets.status as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as status,\n \n coalesce(\n 
cast(tickets.priority as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n '# Ticket : ' || ticket_name || '\\n\\n' || 'Created By : ' || user_name || ' (' || created_by || ')\\n' || 'Created On : ' || created_on || '\\n' || 'Status : ' || status || '\\n' || 'Priority : ' || priority as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n \n \n\n length(\n ticket_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as ticket_tokens\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", "unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.0417101, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", 
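The `coalesce_cast` macro expansions above cast both the column and the `'UNKNOWN'` fallback so the two branches agree on type across warehouses. Reduced to one hypothetical row:

```sql
with users(name) as (values (cast(null as varchar)))
select coalesce(cast(name as text), cast('UNKNOWN' as text)) as user_name
from users;  -- 'UNKNOWN'
```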
"package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.045344, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order 
by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - 
(coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", "checksum": {"name": "sha256", "checksum": "c3d207d8a59844953cd5d01532d3e023d7441025158cc2385fc3fa1441e34c13"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.050724, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO 
DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, now()))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, now()))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, now()))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__user_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.054941, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if 
var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "ee9430d1e865b3c9ac4ac930a94c7a8493dea64ff945cc34403076de4506e5e0"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", 
"materialized": "table", "enabled": true}, "created_at": 1728492761.060362, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n the `valid_until` field. In contrast, schedule history and timezone `valid_until` values mark the end of \n validity at the start of the specified day.\n*/\n\nwith schedule_timezones as (\n select *\n from {{ ref('int_zendesk__schedule_timezones') }} \n\n{% if var('using_holidays', True) %}\n), schedule_holidays as (\n select *\n from {{ ref('int_zendesk__schedule_holiday') }} \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n select \n schedule_timezones.schedule_id,\n schedule_timezones.time_zone,\n schedule_timezones.offset_minutes,\n schedule_timezones.start_time_utc,\n schedule_timezones.end_time_utc,\n schedule_timezones.schedule_name,\n schedule_timezones.schedule_valid_from,\n schedule_timezones.schedule_valid_until,\n schedule_timezones.schedule_starting_sunday,\n schedule_timezones.schedule_ending_sunday,\n schedule_timezones.change_type,\n schedule_holidays.holiday_date,\n schedule_holidays.holiday_name,\n schedule_holidays.holiday_valid_from,\n schedule_holidays.holiday_valid_until,\n schedule_holidays.holiday_starting_sunday,\n schedule_holidays.holiday_ending_sunday,\n schedule_holidays.holiday_start_or_end\n from schedule_timezones\n left join schedule_holidays\n on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n select\n join_holidays.*,\n row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n from join_holidays\n\n-- Label the partition start and add a row for to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n case when valid_from_index = 1 and holiday_start_or_end is not null\n then 'partition_start'\n else holiday_start_or_end\n end as holiday_start_or_end,\n valid_from_index,\n max_valid_from_index\n from valid_from_partition\n \n union all\n\n -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n 
end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n 'partition_end' as holiday_start_or_end,\n max_valid_from_index + 1 as valid_from_index,\n max_valid_from_index\n from valid_from_partition\n where max_valid_from_index > 1\n and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_from', 'minute') }}\n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }}\n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as {{ 
dbt.type_string() }}) \n        end as holiday_name,\n        count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n    from valid_minutes\n\n-- Filter out records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n    select \n        *,\n        cast(1 as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week = 1\n\n    union all\n\n    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n    select \n        distinct *,\n        cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n            as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week > 1\n\n), final as(\n    select \n        schedule_id,\n        valid_from,\n        valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from filter_holidays\n\n    -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n    -- of distinct schedule records with the same start_time_utc and end_time_utc.\n    -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n    -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n    where number_holidays_in_week = number_records_for_schedule_start_end\n        and holiday_name is null\n\n{% else %} \n), final as(\n    select \n        schedule_id,\n        schedule_valid_from as valid_from,\n        schedule_valid_until as valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from schedule_timezones\n{% endif %} \n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__schedule_timezones", "package": null, "version": null}, {"name": "int_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.type_string", "macro.dbt.type_int"], "nodes": ["model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n    This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n    accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n    and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n    !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n    the `valid_until` field. 
In contrast, schedule history and timezone `valid_until` values mark the end of \n    validity at the start of the specified day.\n*/\n\nwith schedule_timezones as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_timezones\" \n\n\n), schedule_holidays as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_holiday\" \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n    select \n        schedule_timezones.schedule_id,\n        schedule_timezones.time_zone,\n        schedule_timezones.offset_minutes,\n        schedule_timezones.start_time_utc,\n        schedule_timezones.end_time_utc,\n        schedule_timezones.schedule_name,\n        schedule_timezones.schedule_valid_from,\n        schedule_timezones.schedule_valid_until,\n        schedule_timezones.schedule_starting_sunday,\n        schedule_timezones.schedule_ending_sunday,\n        schedule_timezones.change_type,\n        schedule_holidays.holiday_date,\n        schedule_holidays.holiday_name,\n        schedule_holidays.holiday_valid_from,\n        schedule_holidays.holiday_valid_until,\n        schedule_holidays.holiday_starting_sunday,\n        schedule_holidays.holiday_ending_sunday,\n        schedule_holidays.holiday_start_or_end\n    from schedule_timezones\n    left join schedule_holidays\n        on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n        and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n        and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n    select\n        join_holidays.*,\n        row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n        count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n    from join_holidays\n\n-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n    select\n        schedule_id,\n        time_zone,\n        offset_minutes,\n        start_time_utc,\n        end_time_utc,\n        schedule_name,\n        schedule_valid_from,\n        schedule_valid_until,\n        schedule_starting_sunday,\n        schedule_ending_sunday,\n        change_type,\n        holiday_name,\n        holiday_date,\n        holiday_valid_from,\n        holiday_valid_until,\n        holiday_starting_sunday,\n        holiday_ending_sunday,\n        case when valid_from_index = 1 and holiday_start_or_end is not null\n            then 'partition_start'\n            else holiday_start_or_end\n            end as holiday_start_or_end,\n        valid_from_index,\n        max_valid_from_index\n    from valid_from_partition\n    \n    union all\n\n    -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n    select\n        schedule_id,\n        time_zone,\n        offset_minutes,\n        start_time_utc,\n        end_time_utc,\n        schedule_name,\n        schedule_valid_from,\n        schedule_valid_until,\n        schedule_starting_sunday,\n        schedule_ending_sunday,\n        change_type,\n        holiday_name,\n        holiday_date,\n        holiday_valid_from,\n        holiday_valid_until,\n        holiday_starting_sunday,\n        holiday_ending_sunday,\n        'partition_end' as holiday_start_or_end,\n        max_valid_from_index + 1 as valid_from_index,\n        max_valid_from_index\n    from valid_from_partition\n    where max_valid_from_index > 1\n        and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or 
holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_from)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_from)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_from)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_until)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_until)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_until)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as TEXT) \n end as holiday_name,\n count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n from valid_minutes\n\n-- Filter out 
records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n    select \n        *,\n        cast(1 as integer) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week = 1\n\n    union all\n\n    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n    select \n        distinct *,\n        cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n            as integer) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week > 1\n\n), final as(\n    select \n        schedule_id,\n        valid_from,\n        valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from filter_holidays\n\n    -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n    -- of distinct schedule records with the same start_time_utc and end_time_utc.\n    -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n    -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n    where number_holidays_in_week = number_records_for_schedule_start_end\n        and holiday_name is null\n\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "30511daddcbbf831fc42f7e5039fad1c76a43499f3c208e1b982ab895dfa7d44"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.065963, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n    \n    select *\n    from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n    \n    select *\n    from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n    \n    select *\n    from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business 
hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= 
ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n now() + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.073171, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", "original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.074645, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as 
is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 
'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_timezones": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_timezones", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_timezones.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_timezones.sql", "unique_id": "model.zendesk.int_zendesk__schedule_timezones", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_timezones"], "alias": "int_zendesk__schedule_timezones", "checksum": {"name": "sha256", "checksum": "b381e2d09c8d831ca04da433891079f92b9e08f9a932575f32c12c73fa3df3b8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.075899, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_timezones\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith split_timezones as (\n select *\n from {{ ref('int_zendesk__timezone_daylight') }} \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from {{ var('schedule') }} \n\n{% if var('using_schedule_histories', True) %}\n), schedule_history as (\n select *\n from {{ 
ref('int_zendesk__schedule_history') }} \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast({{ dbt.current_timestamp() }} as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments 
only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n {{ dbt_utils.group_by(6) }}\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n{% else %} -- when not using schedule histories\n), final_schedule as (\n select \n schedule.schedule_id,\n 0 as schedule_id_index,\n lower(schedule.time_zone) as time_zone,\n schedule.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from schedule\n left join split_timezones\n on split_timezones.time_zone = lower(schedule.time_zone)\n{% endif %}\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() 
}}) as schedule_starting_sunday,\n cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__timezone_daylight", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "int_zendesk__schedule_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt.date_trunc", "macro.dbt_date.week_start"], "nodes": ["model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk.int_zendesk__schedule_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_timezones.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n), split_timezones as (\n select *\n from __dbt__cte__int_zendesk__timezone_daylight \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n\n), schedule_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\" \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. 
Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast(now() as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as timestamp) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n over (partition by schedule_id, 
start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n group by 1,2,3,4,5,6\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as timestamp) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as timestamp) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast(date_trunc('day', split_timezones.valid_from) as timestamp) as timezone_valid_from,\n cast(date_trunc('day', split_timezones.valid_until) as timestamp) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_from + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_until + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__timezone_daylight", "sql": " __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n 
timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.085171, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.int_zendesk__schedule_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_history.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_history.sql", "unique_id": "model.zendesk.int_zendesk__schedule_history", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_history"], "alias": "int_zendesk__schedule_history", "checksum": {"name": "sha256", "checksum": "fa0eb9cea317033ef318536affc3f6a42cd178d1b0959d6341e2dbbdceed5ae0"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.0866601, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nwith audit_logs as (\n select\n cast(source_id as {{ dbt.type_string() }}) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from {{ var('audit_log') }}\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 1) }} as schedule_change_from,\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 2) }} as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- 
multiple changes can occur on one day, so we will keep only the latest change in a day.\n), consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %}\n {% for day, day_number in days_of_week.items() %}\n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n '{{ day }}' as day_of_week,\n cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number,\n {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n {% if not loop.last %}union all{% endif %}\n {% endfor %}\n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n{% if target.type == 'redshift' %}\n-- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE\n), redshift_parse_schedule as (\n -- Redshift requires another CTE for unnesting \n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n day_of_week_schedule,\n json_parse('[' || replace(replace(day_of_week_schedule, ', ', ','), ',', '},{') || ']') as json_schedule\n\n from split_days\n where day_of_week_schedule != '{}' -- exclude when the day_of_week_schedule is empty. 
\n\n), unnested_schedules as (\n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n -- go back to strings\n cast(day_of_week_schedule as {{ dbt.type_string() }}) as day_of_week_schedule,\n {{ clean_schedule('JSON_SERIALIZE(unnested_schedule)') }} as cleaned_unnested_schedule\n \n from redshift_parse_schedule as schedules, schedules.json_schedule as unnested_schedule\n\n{% else %}\n), unnested_schedules as (\n select\n split_days.*,\n\n {%- if target.type == 'bigquery' %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule\n\n {%- elif target.type == 'snowflake' %}\n unnested_schedule.key || ':' || unnested_schedule.value as cleaned_unnested_schedule\n from split_days\n cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\\}\\}', '\\}'), '\\{\\{', '\\{'))) as unnested_schedule\n\n {%- elif target.type == 'postgres' %}\n {{ clean_schedule('unnested_schedule::text') }} as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n {%- elif target.type in ('databricks', 'spark') %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule\n\n {% else %}\n cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule\n from split_days\n {%- endif %}\n\n{% endif %}\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt.type_string", "macro.dbt.split_part", "macro.dbt.type_int", "macro.zendesk.regex_extract", "macro.zendesk.clean_schedule"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_history.sql", "compiled": true, "compiled_code": "\n\nwith audit_logs as (\n select\n cast(source_id as TEXT) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 1\n )\n\n\n \n\n as schedule_change_from,\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 2\n )\n\n\n \n\n as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- multiple changes can occur on one day, so we will keep only the 
latest change in a day.\n), consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n \n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sun' as day_of_week,\n cast('0' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sun.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'mon' as day_of_week,\n cast('1' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?mon.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'tue' as day_of_week,\n cast('2' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?tue.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'wed' as day_of_week,\n cast('3' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?wed.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. 
\n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'thu' as day_of_week,\n cast('4' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?thu.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'fri' as day_of_week,\n cast('5' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?fri.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sat' as day_of_week,\n cast('6' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sat.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n \n \n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n\n), unnested_schedules as (\n select\n split_days.*,\n replace(replace(replace(replace(cast(unnested_schedule::text as TEXT), '{', ''), '}', ''), '\"', ''), ' ', '') as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 1\n )\n\n\n \n\n, ' ') as integer) as start_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 2\n )\n\n\n \n\n, ' ') as integer) as start_time_mm, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 3\n )\n\n\n \n\n, ' ') as integer) as end_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 4\n )\n\n\n \n\n, ' ') as integer) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_holiday.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk.int_zendesk__schedule_holiday", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_holiday"], "alias": "int_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "6c29a7b0c63792193aff20d849a140f105431fc73033c4db32da15d7cfaad005"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.103731, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_holiday\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_holidays'])) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. 
End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from {{ var('schedule') }} \n\n), schedule_holiday as (\n select *\n from {{ var('schedule_holiday') }} \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from,\n cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until,\n cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday,\n cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. First step is to find those holidays.\n {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as 
holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each of the holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "language": "sql", "refs": [{"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt.date_trunc", "macro.dbt.type_timestamp", "macro.dbt_date.week_start", "macro.dbt.dateadd", "macro.dbt.datediff", "macro.dbt.type_int", "macro.dbt_utils.generate_series"], "nodes": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n), schedule_holiday as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast(date_trunc('day', holiday_start_date_at) as timestamp) as holiday_valid_from,\n cast(date_trunc('day', holiday_end_date_at) as timestamp) as holiday_valid_until,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n \n\n holiday_end_date_at + ((interval '1 week') * (1))\n\n + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. 
First step is to find those holidays.\n \n (\n ((holiday_end_date_at)::date - (holiday_start_date_at)::date)\n / 7 + case\n when date_part('dow', (holiday_start_date_at)::timestamp) <= date_part('dow', (holiday_end_date_at)::timestamp) then\n case when holiday_start_date_at <= holiday_end_date_at then 0 else -1 end\n else\n case when holiday_start_date_at <= holiday_end_date_at then 1 else 0 end\n end)\n + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as integer) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join (\n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n \n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n + ((interval '1 day') * (-1))\n\n as timestamp) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * 
(holiday_week_number * 7))\n\n as timestamp)\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.113943, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.1155572, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows 
unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n 
where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.1169941, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], 
"meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.12063, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", 
"original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.124181, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": 
["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", "model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": "hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "36fcc85263683c498c3e819ae5ca2dfac8f8dcdd9bef0c19497a6aed3b8d92e2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 
1728492761.1966121, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", \"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6920571, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add 
more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.696689, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.6978319, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"", "raw_code": "with base as (\n\n 
select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "7e546e0327511ba1db938c68a962b4892fe3462d4ffe23baf84aa1c88e4db9c1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 
1728492761.700835, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"", "raw_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_holidays'])) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": 
"8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.69906, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], 
"quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.690371, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.699759, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group": {"database": 
"postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.689462, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "ffc2c4310aafe6b90a26e22cdab400e6d4c750faab7ea4d7519b2cf9105d3f16"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, 
"access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.691776, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, 
"access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6956532, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6953359, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", 
"checksum": {"name": "sha256", "checksum": "7227f84c3600cc310217efae6695bc0f6aea11b2392f5709a54d444a772a9d2c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. 
The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.69407, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n 
select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__brand.sql", "original_file_path": "models/stg_zendesk__brand.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand", "fqn": ["zendesk_source", "stg_zendesk__brand"], "alias": "stg_zendesk__brand", "checksum": {"name": "sha256", "checksum": "106699200d371f2fac9fe94ce084a357331b215d4130195e1e94d2d07c6d169c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, 
"tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Brands are your customer-facing identities. They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"brand_id": {"name": "brand_id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.688405, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__brand_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),\n staging_columns=get_brand_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__brand_tmp", "package": null, "version": null}, {"name": "stg_zendesk__brand_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_brand_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__brand_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__brand.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_form_history.sql", "original_file_path": "models/stg_zendesk__ticket_form_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_form_history"], "alias": "stg_zendesk__ticket_form_history", "checksum": {"name": "sha256", "checksum": "1e70e9a0b2dfce82e649a8a0507d59d6f3f2832429191ea67988ba0dfd1017cf"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"ticket_form_id": {"name": "ticket_form_id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, 
"tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6962502, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_form_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_form_history_tmp')),\n staging_columns=get_ticket_form_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_form_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_form_history.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__audit_log.sql", "original_file_path": "models/stg_zendesk__audit_log.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log", "fqn": ["zendesk_source", "stg_zendesk__audit_log"], "alias": "stg_zendesk__audit_log", "checksum": {"name": "sha256", "checksum": "590bb4a276a0927000ab959d9acc6545321c095a21f49bbae95c56934ba22b39"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"audit_log_id": {"name": "audit_log_id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6864011, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__audit_log_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__audit_log_tmp')),\n staging_columns=get_audit_log_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n select \n cast(id as {{ dbt.type_string() }}) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}, {"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.zendesk_source.get_audit_log_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__audit_log.sql", "compiled": true, "compiled_code": "\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n action\n \n as \n \n action\n \n, \n \n \n actor_id\n \n as \n \n actor_id\n \n, \n \n \n change_description\n \n as \n \n change_description\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n source_id\n \n as \n \n source_id\n \n, \n \n \n source_label\n \n as \n \n source_label\n \n, \n \n \n source_type\n \n as \n \n source_type\n \n\n\n\n \n from base\n),\n\nfinal as (\n select \n cast(id as TEXT) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as timestamp) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__domain_name.sql", "original_file_path": "models/stg_zendesk__domain_name.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "fqn": ["zendesk_source", "stg_zendesk__domain_name"], "alias": "stg_zendesk__domain_name", "checksum": {"name": "sha256", "checksum": "8c3a4735e0cdea5a463eefc3c6820d15d622857af45dab942410dc64a0ac4bda"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6891232, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__domain_name_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__domain_name_tmp')),\n staging_columns=get_domain_name_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}, {"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_domain_name_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__domain_name.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization_tag.sql", "original_file_path": "models/stg_zendesk__organization_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "fqn": ["zendesk_source", "stg_zendesk__organization_tag"], "alias": "stg_zendesk__organization_tag", "checksum": {"name": "sha256", "checksum": "15f1f4014e4ba78ae7992f28c61e3926b7cd12c6bb32efc7b516db93c1e20d82"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.689887, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tag_tmp')),\n staging_columns=get_organization_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket.sql", "original_file_path": "models/stg_zendesk__ticket.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket", "fqn": ["zendesk_source", "stg_zendesk__ticket"], "alias": "stg_zendesk__ticket", "checksum": {"name": "sha256", "checksum": "8a1201482d9f933a720698fa97c33d1499d5aeeaecd3706d97b3864b54eea531"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], 
"description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.687258, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tmp')),\n staging_columns=get_ticket_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, 
"extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.407951, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zz_zendesk\".\"daylight_time_data\" as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": "606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4207711, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zz_zendesk\".\"user_data\" as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ 
var('directed_schema','dev') }}"}, "created_at": 1728492761.4243689, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"group_data\" as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.427995, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n 
\"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_data\" as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4331148, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"brand_data\" as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", 
"stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.437031, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_tag_data\" as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "caed8406693ab67a1ae858708ab0e22185d3c333ece3db5602b527bfeed8863e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4410582, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"", "raw_code": "--To 
disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_holidays'])) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_holiday_data\" as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4449952, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your 
dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"user_tag_data\" as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4503849, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\" as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", 
"checksum": {"name": "sha256", "checksum": "0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4543612, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_form_history_data\" as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": "stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": "756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, 
"tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.458412, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_comment_data\" as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.461889, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, 
"compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"organization_tag_data\" as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__audit_log_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__audit_log_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__audit_log_tmp"], "alias": "stg_zendesk__audit_log_tmp", "checksum": {"name": "sha256", "checksum": "875185f07856608bdc8129d3ad2cef7ff5dfc2acdf252146ea493a69c889cfed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4661481, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nselect {{ dbt_utils.star(source('zendesk','audit_log')) }} \nfrom {{ source('zendesk','audit_log') }} as audit_log_table", "language": "sql", "refs": [], "sources": [["zendesk", "audit_log"], ["zendesk", "audit_log"]], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.audit_log"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__audit_log_tmp.sql", "compiled": true, "compiled_code": "\n\nselect \"id\",\n \"_fivetran_synced\",\n \"action\",\n \"actor_id\",\n \"change_description\",\n \"created_at\",\n \"source_id\",\n \"source_label\",\n \"source_type\" \nfrom \"postgres\".\"zz_zendesk\".\"audit_log_data\" as audit_log_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": 
"models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.470576, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_data\" as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, 
"contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.474454, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zz_zendesk\".\"organization_data\" as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4780102, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ 
dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__domain_name_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.48525, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, 
"compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zz_zendesk\".\"domain_name_data\" as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.489658, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zz_zendesk\".\"time_zone_data\" as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": 
"test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", "unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.66899, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.670188, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": 
"\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": "unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.671063, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies", "test_metadata": {"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, 
"materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.672031, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.672837, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": 
"model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": "unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.7014642, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, 
"unrendered_config": {}, "created_at": 1728492761.702423, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.703265, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", 
"resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": "not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.704643, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.705457, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, 
"compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.706281, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": 
{"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.707086, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.708081, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by 
organization_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.708991, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": 
null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.709823, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.710643, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect 
ticket_comment_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.711457, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": 
"ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.7123241, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.71314, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": 
"models.stg_zendesk__ticket_form_history", "attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1728492761.713957, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time", "test_metadata": {"name": "unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": 
"models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": "unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.726451, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.727293, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": 
"target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.728309, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": 
"test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", "not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.729172, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.audit_log", "fqn": ["zendesk_source", "zendesk", "audit_log"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "audit_log_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"id": {"name": "id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "created_at": 1728492761.798353}, "source.zendesk_source.zendesk.ticket": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. 
Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "created_at": 1728492761.799576}, "source.zendesk_source.zendesk.brand": {"database": "postgres", "schema": "zz_zendesk", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "created_at": 1728492761.799726}, "source.zendesk_source.zendesk.domain_name": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "created_at": 1728492761.79984}, "source.zendesk_source.zendesk.group": {"database": "postgres", "schema": "zz_zendesk", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. 
Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "created_at": 1728492761.8000891}, "source.zendesk_source.zendesk.organization_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "created_at": 1728492761.800256}, "source.zendesk_source.zendesk.organization": {"database": "postgres", "schema": "zz_zendesk", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. 
Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name": "shared_tickets", "description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "created_at": 1728492761.800394}, "source.zendesk_source.zendesk.ticket_comment": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after":
{"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "created_at": 1728492761.800514}, "source.zendesk_source.zendesk.user_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "created_at": 1728492761.8006241}, "source.zendesk_source.zendesk.user": {"database": "postgres", "schema": "zz_zendesk", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users: end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization membership, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to.
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "created_at": 1728492761.800898}, "source.zendesk_source.zendesk.schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "created_at": 1728492761.801013}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "created_at": 1728492761.801111}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": 
[], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "created_at": 1728492761.801229}, "source.zendesk_source.zendesk.ticket_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "created_at": 1728492761.801322}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "created_at": 1728492761.8014138}, "source.zendesk_source.zendesk.daylight_time": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "created_at": 1728492761.801513}, "source.zendesk_source.zendesk.time_zone": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "created_at": 1728492761.801599}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "created_at": 1728492761.801696}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.49316, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4935522, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": 
"postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.493763, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.493977, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.494117, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.494239, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as 
table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.495817, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4961832, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n 
pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.49685, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4969769, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n 
{{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5063639, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.506926, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507224, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507515, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": 
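For reference, a minimal sketch of how the `unlogged` and `indexes` configs consumed by `postgres__create_table_as` and `postgres__get_create_index_sql` above are typically set on a model. The model name, ref, and columns are hypothetical:

```sql
-- models/example_events.sql (hypothetical)
{{
    config(
        materialized='table',
        unlogged=true,
        indexes=[
            {'columns': ['event_id'], 'unique': true, 'type': 'btree'}
        ]
    )
}}

select
    event_id,
    occurred_at
from {{ ref('stg_example__events') }}
```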
"macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507949, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.508363, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5085282, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, 
auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.508847, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.509205, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.50998, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510172, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510478, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510738, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ 
magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.511141, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.511352, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512009, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512244, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
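The comment and grant macros above back dbt's `persist_docs` and `grants` model configs on Postgres. A minimal sketch, assuming a hypothetical model and a `reporting_role` role that would need to exist in the target database:

```sql
-- models/dim_example.sql (hypothetical)
{{
    config(
        materialized='table',
        persist_docs={'relation': true, 'columns': true},
        grants={'select': ['reporting_role']}
    )
}}

select 1 as id
```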
[], "created_at": 1728492759.512365, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5125399, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512681, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.513246, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and 
DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5140438, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514205, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514507, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514643, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514931, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.515996, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }}\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\t{{ ';' if not loop.last else \"\" }}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.516549, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.516858, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}{{ ';' if not loop.last else \"\" }}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.517309, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5174599, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5181599, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728492759.518333, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.518466, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5190039, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5191748, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.519386, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ 
limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.520003, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.523454, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, 
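The shims above implement dbt's cross-database utility macros for Postgres: `dbt.dateadd` compiles to interval arithmetic, `dbt.datediff` to `date_part` math, and `dbt.listagg` to `string_agg` (or an `array_agg` slice when a row limit is passed). A usage sketch with hypothetical relation and column names:

```sql
-- hypothetical aggregation over a staging model
select
    organization_id,
    {{ dbt.listagg('status', "','", 'order by status') }} as statuses,
    max({{ dbt.datediff('created_at', 'solved_at', 'hour') }}) as max_hours_to_solve,
    min({{ dbt.dateadd('day', 7, 'created_at') }}) as first_sla_due_at
from {{ ref('stg_tickets') }}
group by 1
```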
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.523631, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.524121, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5245068, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525536, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525721, "supported_languages": null}, 
"macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525861, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525996, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.526128, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5264902, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5267699, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set 
config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527054, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527479, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527742, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
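`should_full_refresh` and `should_store_failures` merge model-level config with the `--full-refresh` and `--store-failures` CLI flags. A sketch of a generic test that always persists its failing rows; the test name and predicate are hypothetical:

```sql
-- tests/generic/assert_non_negative.sql (hypothetical)
{% test assert_non_negative(model, column_name) %}

    {{ config(store_failures=true) }}

    select *
    from {{ model }}
    where {{ column_name }} < 0

{% endtest %}
```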
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.531219, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.53138, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5315871, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532268, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532425, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532585, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
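`snapshot_timestamp_strategy` above backs `strategy='timestamp'`: a row counts as changed when the configured `updated_at` is newer than the stored `dbt_valid_from`. A snapshot sketch with hypothetical names:

```sql
-- snapshots/orders_snapshot.sql (hypothetical)
{% snapshot orders_snapshot %}

{{
    config(
        target_schema='snapshots',
        unique_key='id',
        strategy='timestamp',
        updated_at='updated_at',
        invalidate_hard_deletes=true
    )
}}

select * from {{ ref('stg_orders') }}

{% endsnapshot %}
```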
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.534254, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.535687, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5401, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.54038, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540538, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540624, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5407622, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540875, "supported_languages": 
null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.541074, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5419059, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542086, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542321, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542722, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.549417, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.552064, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.55318, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.553589, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.553868, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.554508, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5549939, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", 
"macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5553868, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set expected_sql = config.get('expected_sql') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n {%- endfor -%}\n\n {% if not expected_sql %}\n {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}\n {% endif %}\n {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_expected_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.557184, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = 
materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.562051, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.56243, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.562669, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": 
"macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5642319, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5645282, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro 
materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.565182, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.568099, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.571157, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.572953, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.573513, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.574379, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5746582, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if 
merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5754151, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.582355, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n 
{% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5842829, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.584575, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.585524, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 
'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.585789, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5864162, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5870569, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.588054, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.588468, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.588723, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589113, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589359, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5897012, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589896, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590218, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590416, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590573, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590868, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes",
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.596176, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.601871, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.603025, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.604406, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6054149, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.605717, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.605846, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.606185, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6063302, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.610382, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6135008, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.618803, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6196392, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6198661, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6203258, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.620508, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n 
-- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6206348, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.62077, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.620883, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621034, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621147, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1728492759.621593, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621768, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.622976, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6234188, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name 
~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.623789, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.624575, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.625002, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6253288, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6257641, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.626014, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.626712, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6270661, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.627254, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6274502, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.627697, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.628494, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so 
just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.629764, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630202, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630513, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630798, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ 
null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66567, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.666178, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6664371, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.666548, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66705, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": 
"default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667308, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667527, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6676009, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66785, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667985, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1728492759.668266, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.668397, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66903, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.669419, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6697412, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6698968, "supported_languages": null}, "macro.dbt.safe_cast": 
{"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670167, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670298, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670544, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670696, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6709309, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671083, 
"supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671349, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671479, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671713, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6718779, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672158, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67229, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672529, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672632, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6735868, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.673737, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.673901, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674048, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6742, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6743429, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674499, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674668, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674828, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6749709, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.675122, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.675261, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6754181, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6755562, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6758258, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67605, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676512, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676634, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676973, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6772292, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.677367, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1728492759.677878, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678037, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678268, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678538, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678667, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679028, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": 
"macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67926, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679528, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679659, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6800241, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680201, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.68035, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680524, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680992, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681362, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6815, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681597, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", 
"unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68175, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681823, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681977, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.682136, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.682944, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", 
"unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683072, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683221, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683594, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683771, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6838999, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ 
adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.684048, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.684166, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6862001, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.686419, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68673, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = 
base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.687089, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6875288, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.687926, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688137, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688313, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": 
"macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688641, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68929, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.689536, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.689684, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69011, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6905012, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6907809, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.690999, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69268, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.692799, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.692959, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6930728, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.693405, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69359, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69369, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6939049, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694165, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694384, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694565, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6947808, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.695423, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.695601, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6958332, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69605, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.697414, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6981308, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6983428, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6984901, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.6992362, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.699425, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6996422, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.699817, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7000918, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.700754, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.703618, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.703875, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7040808, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.70433, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704509, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704662, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704832, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705066, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705257, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n 
{% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7055418, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705714, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705868, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7060218, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706167, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706562, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706748, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709098, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709257, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7096279, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709835, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.71003, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.710201, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711315, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711643, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711821, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712151, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712368, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712924, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7131672, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.713896, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = 
adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this ~ \" because the relation doesn't exist\") }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.717149, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7179022, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n 
\"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for \" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7190862, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.720931, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.72108, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.721858, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.722241, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.722791, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND 
----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.723232, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.723302, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7238212, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.724055, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.724342, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
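The entries above register dbt's built-in generic tests (`unique`, `not_null`, `accepted_values`, `relationships`). As a sketch of how a model in this package could attach them in a schema file (the model, column, and accepted values below are illustrative, not taken from the package):

```yml
version: 2

models:
  - name: stg_zendesk__audit_log
    columns:
      - name: id
        tests:
          - unique
          - not_null
      - name: action
        tests:
          - accepted_values:
              values: ['create', 'update', 'destroy']
```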
[], "created_at": 1728492759.72462, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7249548, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.725408, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.725869, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.72662, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.726998, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.727325, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7284968, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.729648, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7306361, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. 
full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.731723, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.73248, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7328472, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", 
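`equal_rowcount` and `fewer_rows_than` above both compare row counts between two relations, joining on a constant key so that Redshift's restriction on dynamic join conditions is avoided. A minimal usage sketch (the compared models are hypothetical):

```yml
version: 2

models:
  - name: zendesk__ticket_enriched
    tests:
      - dbt_utils.equal_rowcount:
          compare_model: ref('stg_zendesk__ticket')
```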
"unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.733557, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.734348, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.734787, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as 
filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.735214, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.735799, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.736253, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7370298, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7374878, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.738166, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' 
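`accepted_range` builds its validation set from a never-true `1 = 2` predicate plus negated bound checks, so any row violating a configured bound surfaces as a failure. A sketch (model, column, and bound are placeholders):

```yml
version: 2

models:
  - name: zendesk__sla_policies
    columns:
      - name: sla_elapsed_time
        tests:
          - dbt_utils.accepted_range:
              min_value: 0
              inclusive: true
```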
,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n {% if group_by_columns|length() == 0 %}\n where {{ column_name }} is not null\n limit 1\n {% endif %}\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.739041, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.73974, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.740489, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": 
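`unique_combination_of_columns` groups by the column list and fails on any group with more than one row, which is the usual way to assert a compound grain. A sketch against the schedule-history grain this patch builds (the column names are assumptions):

```yml
version: 2

models:
  - name: int_zendesk__schedule_history
    tests:
      - dbt_utils.unique_combination_of_columns:
          combination_of_columns:
            - schedule_id
            - valid_from
```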
"macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.741108, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7414489, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7418408, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1728492759.742188, "supported_languages": null}, "macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.742846, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as {{ dbt.type_numeric() }}) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.743659, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7445571, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro 
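`not_null_proportion` computes the non-null share of a column, optionally per group, and fails when it falls outside `at_least`/`at_most`. A minimal sketch (model, column, and threshold are illustrative):

```yml
version: 2

models:
  - name: stg_zendesk__user
    columns:
      - name: email
        tests:
          - dbt_utils.not_null_proportion:
              at_least: 0.95
```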
default__test_sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n{% set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.745416, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns, exclude_columns, precision)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7476249, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n\n{%- if compare_columns and exclude_columns -%}\n {{ exceptions.raise_compiler_error(\"Both a compare and an ignore list were provided to the `equality` macro. Only one is allowed\") }}\n{%- endif -%}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
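`sequential_values` lags the column within each group and fails whenever consecutive values do not differ by exactly `interval` (or by one `datepart` step for timestamps). A sketch with hypothetical model and column names:

```yml
version: 2

models:
  - name: zendesk__ticket_field_history
    columns:
      - name: valid_starting_at
        tests:
          - dbt_utils.sequential_values:
              interval: 1
              datepart: day
              group_by_columns: ['ticket_id']
```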
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{# Ensure there are no extra columns in the compare_model vs model #}\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- do dbt_utils._is_ephemeral(compare_model, 'test_equality') -%}\n\n {%- set model_columns = adapter.get_columns_in_relation(model) -%}\n {%- set compare_model_columns = adapter.get_columns_in_relation(compare_model) -%}\n\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- set include_model_columns = [] %}\n {%- for column in model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n {%- for column in compare_model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_model_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns_set = set(include_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(include_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- else -%}\n {%- set compare_columns_set = set(model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(compare_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- endif -%}\n\n {% if compare_columns_set != compare_model_columns_set %}\n {{ exceptions.raise_compiler_error(compare_model ~\" has less columns than \" ~ model ~ \", please ensure they have the same columns or use the `compare_columns` or `exclude_columns` arguments to subset them.\") }}\n {% endif %}\n\n\n{% endif %}\n\n{%- if not precision -%}\n {%- if not compare_columns -%}\n {# \n You cannot get the columns in an ephemeral model (due to not existing in the information schema),\n so if the user does not provide an explicit list of columns we must error in the case it is ephemeral\n #}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model)-%}\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- for column in compare_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns = include_columns | map(attribute='quoted') %}\n {%- else -%} {# Compare columns provided #}\n {%- set compare_columns = compare_columns | map(attribute='quoted') %}\n {%- endif -%}\n {%- endif -%}\n\n {% set compare_cols_csv = compare_columns | join(', ') %}\n\n{% else %} {# Precision required #}\n {#-\n If rounding is required, we need to get the types, so it cannot be ephemeral even if they provide column names\n -#}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set columns = adapter.get_columns_in_relation(model) -%}\n\n {% set columns_list = [] %}\n {%- for col in columns -%}\n {%- if (\n (col.name|lower in compare_columns|map('lower') or not compare_columns) and\n (col.name|lower not in exclude_columns|map('lower') or not exclude_columns)\n ) -%}\n {# 
Databricks double type is not picked up by any number type checks in dbt #}\n {%- if col.is_float() or col.is_numeric() or col.data_type == 'double' -%}\n {# Cast is required due to postgres not having round for a double precision number #}\n {%- do columns_list.append('round(cast(' ~ col.quoted ~ ' as ' ~ dbt.type_numeric() ~ '),' ~ precision ~ ') as ' ~ col.quoted) -%}\n {%- else -%} {# Non-numeric type #}\n {%- do columns_list.append(col.quoted) -%}\n {%- endif -%}\n {% endif %}\n {%- endfor -%}\n\n {% set compare_cols_csv = columns_list | join(', ') %}\n\n{% endif %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_numeric", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.751712, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7522519, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.752539, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", 
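`equality` diffs two relations with `except` in both directions, optionally rounding numeric columns to `precision` so floating-point noise does not fail the comparison. A sketch for validating a model against a fixture table (both names are placeholders):

```yml
version: 2

models:
  - name: zendesk__ticket_metrics
    tests:
      - dbt_utils.equality:
          compare_model: ref('zendesk__ticket_metrics_expected')
          precision: 4
```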
"macro_sql": "{% test mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.756105, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last 
record.\n coalesce(\n upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.757956, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7583182, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.75849, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.758945, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759223, "supported_languages": null}, 
"macro.dbt_utils.default__pretty_time": {"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759409, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759661, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7598348, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{% if not string %}\n{{ return('') }}\n{% endif %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.760593, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro 
}}` macro cannot be used with ephemeral models, as it relies on the information schema.\n\n`{{ model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7614138, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.762176, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.762826, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.763065, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n 
)}}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.763436, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7638478, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.764514, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7648509, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.765196, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7659352, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.766989, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.768021, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7684429, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.768638, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not 
loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7691898, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.769943, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.770859, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.771272, "supported_languages": null}, 
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.77155, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.772904, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.774365, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name, quote_identifiers)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.775907, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n {%- set current_col_name = adapter.quote(col.column) if quote_identifiers else col.column -%}\n select\n {%- for exclude_col in exclude %}\n {{ adapter.quote(exclude_col) if quote_identifiers else exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ adapter.quote(field_name) if quote_identifiers else field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(current_col_name) }}\n {% else %}\n {{ current_col_name }}\n {% endif %}\n as {{ cast_to }}) as {{ 
adapter.quote(value_name) if quote_identifiers else value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.777925, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7782779, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7784162, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.781806, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.785173, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7854989, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7858539, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.postgres__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.786747, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7869592, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }} as tt\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{% endmacro %}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.787272, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7875688, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.78778, "supported_languages": null}, "macro.dbt_utils.databricks__deduplicate": {"name": "databricks__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.databricks__deduplicate", "macro_sql": "\n{%- macro databricks__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.787955, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.788131, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, 
*varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.788575, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by \\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7888231, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.78927, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.789786, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.790112, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.79042, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.793671, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.794013, "supported_languages": null}, "macro.dbt_utils.redshift__get_tables_by_pattern_sql": {"name": "redshift__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.redshift__get_tables_by_pattern_sql", "macro_sql": "{% macro redshift__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% set sql %}\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from \"{{ database }}\".\"information_schema\".\"tables\"\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n union all\n select distinct\n schemaname as {{ adapter.quote('table_schema') }},\n tablename as {{ adapter.quote('table_name') }},\n 'external' as {{ adapter.quote('table_type') }}\n from svv_external_tables\n where redshift_database_name = '{{ database }}'\n and schemaname ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n {% endset %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.794935, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7956128, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro 
_bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7960792, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.79733, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": "default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. 
Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7990458, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.800281, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.801087, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.801541, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {{ return('') }}\n {% endif %}\n\n {%- set include_cols = [] %}\n {%- set cols = adapter.get_columns_in_relation(from) -%}\n {%- set except = except | map(\"lower\") | list %}\n {%- for col in cols -%}\n {%- if col.column|lower not in except -%}\n {% do include_cols.append(col.column) %}\n {%- endif %}\n {%- endfor %}\n\n {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.802216, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8029718, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803404, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": 
"macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803582, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n {{ return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803956, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8045151, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8049548, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": 
"\n\n{%- macro default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.805551, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.postgres__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806079, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8062139, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806346, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n 
case table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806481, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8069718, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. 
Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.808026, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.809209, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.809483, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8100202, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8107688, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.815974, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in 
get_tables(table_regex_pattern) %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.816612, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.817649, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.818435, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for table 
in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8193111, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8202581, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.820436, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.82054, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8233302, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}})))\n else ceil(months_between(date({{second_date}}), date({{first_date}})))\n end\n \n -- did we cross a month boundary?\n + case\n when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n else 0 end\n \n {%- elif datepart == 'quarter' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n end\n \n -- did we cross a quarter boundary?\n + case\n when {{first_date}} < {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then 1\n when {{first_date}} > {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then -1\n else 0 end\n\n {%- elif datepart == 'year' -%}\n \n year({{second_date}}) - year({{first_date}})\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set divisor -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n case when {{first_date}} < {{second_date}}\n then ceil((\n {# make sure the timestamps are real, otherwise raise an error asap #}\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n else floor((\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n end\n \n {% if datepart == 'millisecond' %}\n + cast(date_format({{second_date}}, 'SSS') as int)\n - cast(date_format({{first_date}}, 'SSS') as int)\n {% endif %}\n \n {% if datepart == 'microsecond' %} \n {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n -- Spark doesn't really support microseconds, so this is a massive hack!\n -- It will only work if the timestamp-string is of the format\n -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int) \n {% endif %}\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.83119, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8313348, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.831409, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.831959, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.833505, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.833824, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8340852, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834341, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834645, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834836, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.835028, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.844357, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.846505, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n 
{{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\n\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8489652, "supported_languages": null}, "macro.dbt_date.get_base_dates": {"name": "get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.get_base_dates", "macro_sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_base_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.850685, "supported_languages": null}, "macro.dbt_date.default__get_base_dates": {"name": "default__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.default__get_base_dates", "macro_sql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.851424, "supported_languages": null}, "macro.dbt_date.bigquery__get_base_dates": {"name": "bigquery__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": 
"macro.dbt_date.bigquery__get_base_dates", "macro_sql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as datetime )\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as datetime )\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.852235, "supported_languages": null}, "macro.dbt_date.trino__get_base_dates": {"name": "trino__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.trino__get_base_dates", "macro_sql": "{% macro trino__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.now()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.now", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.852952, "supported_languages": null}, "macro.dbt_date.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.853652, "supported_languages": null}, "macro.dbt_date.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, 
end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854202, "supported_languages": null}, "macro.dbt_date.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854419, "supported_languages": null}, "macro.dbt_date.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{\n dbt_date.generate_series(\n dbt_date.get_intervals_between(start_date, end_date, datepart)\n )\n }}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"(row_number() over (order by 1) - 1)\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.generate_series", "macro.dbt_date.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854762, "supported_languages": null}, "macro.dbt_date.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.855572, "supported_languages": null}, "macro.dbt_date.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": 
"macro.dbt_date.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.855958, "supported_languages": null}, "macro.dbt_date.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8561332, "supported_languages": null}, "macro.dbt_date.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.856618, "supported_languages": null}, "macro.dbt_date.date": {"name": "date", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(modules.datetime.date(year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.856937, "supported_languages": null}, "macro.dbt_date.datetime": {"name": "datetime", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.datetime", "macro_sql": "{% macro datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tz=None) %}\n {% set tz = tz if tz else var(\"dbt_date:time_zone\") %}\n {{ return(\n modules.datetime.datetime(\n year=year, month=month, day=day, hour=hour,\n minute=minute, second=second, microsecond=microsecond,\n tzinfo=modules.pytz.timezone(tz)\n )\n ) 
}}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.857451, "supported_languages": null}, "macro.dbt_date.get_fiscal_year_dates": {"name": "get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.get_fiscal_year_dates", "macro_sql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_fiscal_year_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.859603, "supported_languages": null}, "macro.dbt_date.default__get_fiscal_year_dates": {"name": "default__get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.default__get_fiscal_year_dates", "macro_sql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\n-- this gets all the dates within a fiscal year\n-- determined by the given year-end-month\n-- ending on the saturday closest to that month's end date\nwith fsc_date_dimension as (\n select * from {{ dates }}\n),\nyear_month_end as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.month_end_date\n from\n fsc_date_dimension d\n where\n d.month_of_year = {{ year_end_month }}\n group by 1,2\n\n),\nweeks as (\n\n select\n d.year_number,\n d.month_of_year,\n d.date_day as week_start_date,\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\n from\n fsc_date_dimension d\n where\n d.day_of_week = {{ week_start_day }}\n\n),\n-- get all the weeks that start in the month the year ends\nyear_week_ends as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.week_end_date\n from\n weeks d\n where\n d.month_of_year = {{ year_end_month }}\n group by\n 1,2\n\n),\n-- then calculate which Saturday is closest to month end\nweeks_at_month_end as (\n\n select\n d.fiscal_year_number,\n d.week_end_date,\n m.month_end_date,\n rank() over\n (partition by d.fiscal_year_number\n order by\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\n\n ) as closest_to_month_end\n from\n year_week_ends d\n join\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\n),\nfiscal_year_range as (\n\n select\n w.fiscal_year_number,\n cast(\n {{ dbt.dateadd('day', 1,\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\n as date) as fiscal_year_start_date,\n w.week_end_date as fiscal_year_end_date\n from\n weeks_at_month_end w\n where\n w.closest_to_month_end = 1\n\n),\nfiscal_year_dates as (\n\n select\n d.date_day,\n m.fiscal_year_number,\n m.fiscal_year_start_date,\n m.fiscal_year_end_date,\n w.week_start_date,\n w.week_end_date,\n -- we reset the weeks of the year starting with the merch year start date\n dense_rank()\n over(\n partition by m.fiscal_year_number\n order by w.week_start_date\n ) as fiscal_week_of_year\n from\n fsc_date_dimension d\n join\n fiscal_year_range m on 
d.date_day between m.fiscal_year_start_date and m.fiscal_year_end_date\n join\n weeks w on d.date_day between w.week_start_date and w.week_end_date\n\n)\nselect * from fiscal_year_dates order by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.860286, "supported_languages": null}, "macro.dbt_date.get_fiscal_periods": {"name": "get_fiscal_periods", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_periods.sql", "original_file_path": "macros/fiscal_date/get_fiscal_periods.sql", "unique_id": "macro.dbt_date.get_fiscal_periods", "macro_sql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\n{#\nThis macro requires you to pass in a ref to a date dimension, created via\ndbt_date.get_date_dimension()s\n#}\nwith fscl_year_dates_for_periods as (\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\n),\nfscl_year_w13 as (\n\n select\n f.*,\n -- We count the weeks in a 13 week period\n -- and separate the 4-5-4 week sequences\n mod(cast(\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\n ), 13) as w13_number,\n -- Chop weeks into 13 week merch quarters\n cast(\n least(\n floor((f.fiscal_week_of_year-1)/13.0)\n , 3)\n as {{ dbt.type_int() }}) as quarter_number\n from\n fscl_year_dates_for_periods f\n\n),\nfscl_periods as (\n\n select\n f.date_day,\n f.fiscal_year_number,\n f.week_start_date,\n f.week_end_date,\n f.fiscal_week_of_year,\n case\n -- we move week 53 into the 3rd period of the quarter\n when f.fiscal_week_of_year = 53 then 3\n when f.w13_number between 0 and 3 then 1\n when f.w13_number between 4 and 8 then 2\n when f.w13_number between 9 and 12 then 3\n end as period_of_quarter,\n f.quarter_number\n from\n fscl_year_w13 f\n\n),\nfscl_periods_quarters as (\n\n select\n f.*,\n cast((\n (f.quarter_number * 3) + f.period_of_quarter\n ) as {{ dbt.type_int() }}) as fiscal_period_number\n from\n fscl_periods f\n\n)\nselect\n date_day,\n fiscal_year_number,\n week_start_date,\n week_end_date,\n fiscal_week_of_year,\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\n fiscal_period_number,\n quarter_number+1 as fiscal_quarter_number,\n period_of_quarter as fiscal_period_of_quarter\nfrom\n fscl_periods_quarters\norder by 1,2\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_fiscal_year_dates", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8613272, "supported_languages": null}, "macro.dbt_date.tomorrow": {"name": "tomorrow", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/tomorrow.sql", "original_file_path": "macros/calendar_date/tomorrow.sql", "unique_id": "macro.dbt_date.tomorrow", "macro_sql": "{%- macro tomorrow(date=None, tz=None) -%}\n{{ dbt_date.n_days_away(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8615942, "supported_languages": null}, "macro.dbt_date.next_week": {"name": "next_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_week.sql", 
"original_file_path": "macros/calendar_date/next_week.sql", "unique_id": "macro.dbt_date.next_week", "macro_sql": "{%- macro next_week(tz=None) -%}\n{{ dbt_date.n_weeks_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8617868, "supported_languages": null}, "macro.dbt_date.next_month_name": {"name": "next_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_name.sql", "original_file_path": "macros/calendar_date/next_month_name.sql", "unique_id": "macro.dbt_date.next_month_name", "macro_sql": "{%- macro next_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.862169, "supported_languages": null}, "macro.dbt_date.next_month": {"name": "next_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month.sql", "original_file_path": "macros/calendar_date/next_month.sql", "unique_id": "macro.dbt_date.next_month", "macro_sql": "{%- macro next_month(tz=None) -%}\n{{ dbt_date.n_months_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.862371, "supported_languages": null}, "macro.dbt_date.day_name": {"name": "day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.day_name", "macro_sql": "{%- macro day_name(date, short=True) -%}\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863244, "supported_languages": null}, "macro.dbt_date.default__day_name": {"name": "default__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.default__day_name", "macro_sql": "\n\n{%- macro default__day_name(date, short) -%}\n{%- set f = 'Dy' if short else 'Day' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863463, "supported_languages": null}, "macro.dbt_date.snowflake__day_name": {"name": "snowflake__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.snowflake__day_name", "macro_sql": "\n\n{%- macro snowflake__day_name(date, short) -%}\n {%- if short -%}\n dayname({{ date }})\n {%- else -%}\n -- long version not implemented on Snowflake so we're doing it manually :/\n case dayname({{ date }})\n when 'Mon' then 'Monday'\n when 'Tue' then 'Tuesday'\n when 
'Wed' then 'Wednesday'\n when 'Thu' then 'Thursday'\n when 'Fri' then 'Friday'\n when 'Sat' then 'Saturday'\n when 'Sun' then 'Sunday'\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863741, "supported_languages": null}, "macro.dbt_date.bigquery__day_name": {"name": "bigquery__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.bigquery__day_name", "macro_sql": "\n\n{%- macro bigquery__day_name(date, short) -%}\n{%- set f = '%a' if short else '%A' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8639429, "supported_languages": null}, "macro.dbt_date.postgres__day_name": {"name": "postgres__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.postgres__day_name", "macro_sql": "\n\n{%- macro postgres__day_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMDy' if short else 'FMDay' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8641331, "supported_languages": null}, "macro.dbt_date.duckdb__day_name": {"name": "duckdb__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.duckdb__day_name", "macro_sql": "\n\n{%- macro duckdb__day_name(date, short) -%}\n {%- if short -%}\n substr(dayname({{ date }}), 1, 3)\n {%- else -%}\n dayname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.864307, "supported_languages": null}, "macro.dbt_date.spark__day_name": {"name": "spark__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.spark__day_name", "macro_sql": "\n\n{%- macro spark__day_name(date, short) -%}\n{%- set f = 'E' if short else 'EEEE' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8644931, "supported_languages": null}, "macro.dbt_date.trino__day_name": {"name": "trino__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.trino__day_name", "macro_sql": "\n\n{%- macro trino__day_name(date, short) -%}\n{%- set f = 'a' if short else 'W' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.864685, "supported_languages": null}, "macro.dbt_date.to_unixtimestamp": {"name": "to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.to_unixtimestamp", "macro_sql": "{%- macro to_unixtimestamp(timestamp) -%}\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__to_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865084, "supported_languages": null}, "macro.dbt_date.default__to_unixtimestamp": {"name": "default__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__to_unixtimestamp", "macro_sql": "\n\n{%- macro default__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8652182, "supported_languages": null}, "macro.dbt_date.snowflake__to_unixtimestamp": {"name": "snowflake__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__to_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8653498, "supported_languages": null}, "macro.dbt_date.bigquery__to_unixtimestamp": {"name": "bigquery__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__to_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\n unix_seconds({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.86545, "supported_languages": null}, "macro.dbt_date.spark__to_unixtimestamp": {"name": "spark__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.spark__to_unixtimestamp", "macro_sql": "\n\n{%- macro spark__to_unixtimestamp(timestamp) -%}\n unix_timestamp({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865546, "supported_languages": null}, "macro.dbt_date.trino__to_unixtimestamp": {"name": "trino__to_unixtimestamp", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__to_unixtimestamp", "macro_sql": "\n\n{%- macro trino__to_unixtimestamp(timestamp) -%}\n to_unixtime({{ timestamp }} AT TIME ZONE 'UTC')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8656409, "supported_languages": null}, "macro.dbt_date.n_days_away": {"name": "n_days_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_away.sql", "original_file_path": "macros/calendar_date/n_days_away.sql", "unique_id": "macro.dbt_date.n_days_away", "macro_sql": "{%- macro n_days_away(n, date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865872, "supported_languages": null}, "macro.dbt_date.week_start": {"name": "week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.week_start", "macro_sql": "{%- macro week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.866352, "supported_languages": null}, "macro.dbt_date.default__week_start": {"name": "default__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.default__week_start", "macro_sql": "{%- macro default__week_start(date) -%}\ncast({{ dbt.date_trunc('week', date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8664918, "supported_languages": null}, "macro.dbt_date.snowflake__week_start": {"name": "snowflake__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.snowflake__week_start", "macro_sql": "\n\n{%- macro snowflake__week_start(date) -%}\n {#\n Get the day of week offset: e.g. 
if the date is a Sunday,\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\n #}\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.day_of_week", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.866833, "supported_languages": null}, "macro.dbt_date.postgres__week_start": {"name": "postgres__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.postgres__week_start", "macro_sql": "\n\n{%- macro postgres__week_start(date) -%}\n-- Sunday as week start date\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8670778, "supported_languages": null}, "macro.dbt_date.duckdb__week_start": {"name": "duckdb__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.duckdb__week_start", "macro_sql": "\n\n{%- macro duckdb__week_start(date) -%}\n{{ return(dbt_date.postgres__week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.867218, "supported_languages": null}, "macro.dbt_date.iso_week_start": {"name": "iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.iso_week_start", "macro_sql": "{%- macro iso_week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868084, "supported_languages": null}, "macro.dbt_date._iso_week_start": {"name": "_iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date._iso_week_start", "macro_sql": "{%- macro _iso_week_start(date, week_type) -%}\ncast({{ dbt.date_trunc(week_type, date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868285, "supported_languages": null}, "macro.dbt_date.default__iso_week_start": {"name": "default__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", 
"unique_id": "macro.dbt_date.default__iso_week_start", "macro_sql": "\n\n{%- macro default__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868448, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_start": {"name": "snowflake__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_start", "macro_sql": "\n\n{%- macro snowflake__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868599, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_start": {"name": "postgres__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.postgres__iso_week_start", "macro_sql": "\n\n{%- macro postgres__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868748, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_start": {"name": "duckdb__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_start", "macro_sql": "\n\n{%- macro duckdb__iso_week_start(date) -%}\n{{ return(dbt_date.postgres__iso_week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868906, "supported_languages": null}, "macro.dbt_date.spark__iso_week_start": {"name": "spark__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.spark__iso_week_start", "macro_sql": "\n\n{%- macro spark__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8691032, "supported_languages": null}, "macro.dbt_date.trino__iso_week_start": {"name": "trino__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.trino__iso_week_start", "macro_sql": "\n\n{%- macro trino__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": 
{"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8692951, "supported_languages": null}, "macro.dbt_date.n_days_ago": {"name": "n_days_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_ago.sql", "original_file_path": "macros/calendar_date/n_days_ago.sql", "unique_id": "macro.dbt_date.n_days_ago", "macro_sql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{%- set n = n|int -%}\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.869802, "supported_languages": null}, "macro.dbt_date.last_week": {"name": "last_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_week.sql", "original_file_path": "macros/calendar_date/last_week.sql", "unique_id": "macro.dbt_date.last_week", "macro_sql": "{%- macro last_week(tz=None) -%}\n{{ dbt_date.n_weeks_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.869994, "supported_languages": null}, "macro.dbt_date.now": {"name": "now", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/now.sql", "original_file_path": "macros/calendar_date/now.sql", "unique_id": "macro.dbt_date.now", "macro_sql": "{%- macro now(tz=None) -%}\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.convert_timezone", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8701801, "supported_languages": null}, "macro.dbt_date.periods_since": {"name": "periods_since", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/periods_since.sql", "original_file_path": "macros/calendar_date/periods_since.sql", "unique_id": "macro.dbt_date.periods_since", "macro_sql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870424, "supported_languages": null}, "macro.dbt_date.today": {"name": "today", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/today.sql", "original_file_path": "macros/calendar_date/today.sql", "unique_id": "macro.dbt_date.today", "macro_sql": "{%- macro today(tz=None) -%}\ncast({{ dbt_date.now(tz) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870589, "supported_languages": null}, "macro.dbt_date.last_month": {"name": "last_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month.sql", "original_file_path": 
"macros/calendar_date/last_month.sql", "unique_id": "macro.dbt_date.last_month", "macro_sql": "{%- macro last_month(tz=None) -%}\n{{ dbt_date.n_months_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870763, "supported_languages": null}, "macro.dbt_date.day_of_year": {"name": "day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.day_of_year", "macro_sql": "{%- macro day_of_year(date) -%}\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.871166, "supported_languages": null}, "macro.dbt_date.default__day_of_year": {"name": "default__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.default__day_of_year", "macro_sql": "\n\n{%- macro default__day_of_year(date) -%}\n {{ dbt_date.date_part('dayofyear', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.871304, "supported_languages": null}, "macro.dbt_date.postgres__day_of_year": {"name": "postgres__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.postgres__day_of_year", "macro_sql": "\n\n{%- macro postgres__day_of_year(date) -%}\n {{ dbt_date.date_part('doy', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8714392, "supported_languages": null}, "macro.dbt_date.redshift__day_of_year": {"name": "redshift__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.redshift__day_of_year", "macro_sql": "\n\n{%- macro redshift__day_of_year(date) -%}\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8716109, "supported_languages": null}, "macro.dbt_date.spark__day_of_year": {"name": "spark__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.spark__day_of_year", "macro_sql": "\n\n{%- macro spark__day_of_year(date) -%}\n dayofyear({{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.871788, "supported_languages": null}, "macro.dbt_date.trino__day_of_year": {"name": "trino__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.trino__day_of_year", "macro_sql": "\n\n{%- macro trino__day_of_year(date) -%}\n {{ dbt_date.date_part('day_of_year', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8719249, "supported_languages": null}, "macro.dbt_date.round_timestamp": {"name": "round_timestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/round_timestamp.sql", "original_file_path": "macros/calendar_date/round_timestamp.sql", "unique_id": "macro.dbt_date.round_timestamp", "macro_sql": "{% macro round_timestamp(timestamp) %}\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8721569, "supported_languages": null}, "macro.dbt_date.from_unixtimestamp": {"name": "from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.from_unixtimestamp", "macro_sql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__from_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.874784, "supported_languages": null}, "macro.dbt_date.default__from_unixtimestamp": {"name": "default__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__from_unixtimestamp", "macro_sql": "\n\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp({{ epochs }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.87505, "supported_languages": null}, "macro.dbt_date.postgres__from_unixtimestamp": {"name": "postgres__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.postgres__from_unixtimestamp", "macro_sql": "\n\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n 
cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8753102, "supported_languages": null}, "macro.dbt_date.snowflake__from_unixtimestamp": {"name": "snowflake__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__from_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n {%- set scale = 0 -%}\n {%- elif format == \"milliseconds\" -%}\n {%- set scale = 3 -%}\n {%- elif format == \"microseconds\" -%}\n {%- set scale = 6 -%}\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp_ntz({{ epochs }}, {{ scale }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.875747, "supported_languages": null}, "macro.dbt_date.bigquery__from_unixtimestamp": {"name": "bigquery__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__from_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n timestamp_seconds({{ epochs }})\n {%- elif format == \"milliseconds\" -%}\n timestamp_millis({{ epochs }})\n {%- elif format == \"microseconds\" -%}\n timestamp_micros({{ epochs }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8761091, "supported_languages": null}, "macro.dbt_date.trino__from_unixtimestamp": {"name": "trino__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__from_unixtimestamp", "macro_sql": "\n\n{%- macro trino__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n cast(from_unixtime({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"milliseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 6)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"microseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 3)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"nanoseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8766668, "supported_languages": null}, "macro.dbt_date.duckdb__from_unixtimestamp": {"name": "duckdb__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.duckdb__from_unixtimestamp", "macro_sql": "\n\n\n{%- macro duckdb__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.876918, "supported_languages": null}, "macro.dbt_date.n_months_ago": {"name": "n_months_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_ago.sql", "original_file_path": "macros/calendar_date/n_months_ago.sql", "unique_id": "macro.dbt_date.n_months_ago", "macro_sql": "{%- macro n_months_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.87727, "supported_languages": null}, "macro.dbt_date.date_part": {"name": "date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.date_part", "macro_sql": "{% macro date_part(datepart, date) -%}\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.877603, "supported_languages": null}, "macro.dbt_date.default__date_part": {"name": "default__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.default__date_part", "macro_sql": "{% macro default__date_part(datepart, date) -%}\n date_part('{{ datepart }}', {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.877815, "supported_languages": null}, "macro.dbt_date.bigquery__date_part": {"name": "bigquery__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.bigquery__date_part", "macro_sql": "{% macro bigquery__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8779898, 
"supported_languages": null}, "macro.dbt_date.trino__date_part": {"name": "trino__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.trino__date_part", "macro_sql": "{% macro trino__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878127, "supported_languages": null}, "macro.dbt_date.n_weeks_away": {"name": "n_weeks_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_away.sql", "original_file_path": "macros/calendar_date/n_weeks_away.sql", "unique_id": "macro.dbt_date.n_weeks_away", "macro_sql": "{%- macro n_weeks_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878518, "supported_languages": null}, "macro.dbt_date.day_of_month": {"name": "day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.day_of_month", "macro_sql": "{%- macro day_of_month(date) -%}\n{{ dbt_date.date_part('day', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8787389, "supported_languages": null}, "macro.dbt_date.redshift__day_of_month": {"name": "redshift__day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.redshift__day_of_month", "macro_sql": "\n\n{%- macro redshift__day_of_month(date) -%}\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878942, "supported_languages": null}, "macro.dbt_date.yesterday": {"name": "yesterday", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/yesterday.sql", "original_file_path": "macros/calendar_date/yesterday.sql", "unique_id": "macro.dbt_date.yesterday", "macro_sql": "{%- macro yesterday(date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.879159, "supported_languages": null}, "macro.dbt_date.day_of_week": {"name": "day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.day_of_week", "macro_sql": "{%- macro 
day_of_week(date, isoweek=true) -%}\n{{ adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.882108, "supported_languages": null}, "macro.dbt_date.default__day_of_week": {"name": "default__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.default__day_of_week", "macro_sql": "\n\n{%- macro default__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else {{ dow }}\n end\n {%- else -%}\n {{ dow }} + 1\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.882408, "supported_languages": null}, "macro.dbt_date.snowflake__day_of_week": {"name": "snowflake__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.snowflake__day_of_week", "macro_sql": "\n\n{%- macro snowflake__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'dayofweekiso' -%}\n {{ dbt_date.date_part(dow_part, date) }}\n {%- else -%}\n {%- set dow_part = 'dayofweek' -%}\n case\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\n else {{ dbt_date.date_part(dow_part, date) }} + 1\n end\n {%- endif -%}\n\n\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8828, "supported_languages": null}, "macro.dbt_date.bigquery__day_of_week": {"name": "bigquery__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.bigquery__day_of_week", "macro_sql": "\n\n{%- macro bigquery__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (1) to Monday (2)\n when {{ dow }} = 1 then 7\n else {{ dow }} - 1\n end\n {%- else -%}\n {{ dow }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883073, "supported_languages": null}, "macro.dbt_date.postgres__day_of_week": {"name": "postgres__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.postgres__day_of_week", "macro_sql": "\n\n\n{%- macro postgres__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'isodow' -%}\n -- Monday(1) to Sunday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\n {%- else -%}\n {%- set dow_part = 'dow' 
-%}\n -- Sunday(1) to Saturday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883479, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883832, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883997, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.884205, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.884474, "supported_languages": null}, 
"macro.dbt_date.iso_week_end": {"name": "iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8849258, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885154, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885354, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885514, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8859, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8865972, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.886806, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887115, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887398, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887707, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro 
duckdb__month_name(date, short) -%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887923, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888222, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888424, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888675, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.889104, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.88928, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8894238, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.889523, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8907678, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.890962, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": 
"macro.dbt_date.bigquery__convert_timezone", "macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8911061, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8913321, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8915122, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.891695, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.891853, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.892076, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.892406, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8930979, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893278, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893416, 
"supported_languages": null}, "macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893551, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893758, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893895, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894026, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8941572, "supported_languages": null}, 
"macro.dbt_date.week_end": {"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894639, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894767, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894957, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895152, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895287, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": 
"macros/calendar_date/next_month_number.sql", "unique_id": "macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895492, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895694, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.896066, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897011, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897287, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": "{% 
macro redshift__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897564, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8977559, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8979032, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.898072, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1728492759.899037, "supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.899756, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90111, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.901348, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.901582, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not 
loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90181, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9020329, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.902285, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.902535, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90276, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903072, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903174, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903285, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903381, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903732, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9044108, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", 
"original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.905568, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90611, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.906854, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) 
}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907521, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907723, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907857, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9079921, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.908143, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911305, 
"supported_languages": null}, "macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911476, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911635, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911784, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', 
({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.941542, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.94254, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.942681, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.942951, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", 
"original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.943232, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.94336, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9434838, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.943609, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9437351, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 
'fivetran_utils') (union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9442198, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.944769, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9452648, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945462, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} 
ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945674, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945919, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.947016, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.951387, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.951798, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.952193, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.95381, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.954347, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.954916, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955071, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955221, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.95539, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955538, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9556842, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.956507, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.957946, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9587672, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9589472, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959111, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959275, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959442, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959622, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959878, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959998, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9601128, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.960762, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9621708, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.962369, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.963255, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.96718, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.971964, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.973359, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9736981, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9738472, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974036, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.postgres__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9743621, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974487, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974599, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9746969, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", 
"unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974793, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975043, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975143, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975241, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9756238, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(False) 
}}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9759948, "supported_languages": null}, "macro.zendesk.regex_extract": {"name": "regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.regex_extract", "macro_sql": "{% macro regex_extract(string, day) -%}\n\n{{ adapter.dispatch('regex_extract', 'zendesk') (string, day) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.postgres__regex_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.976825, "supported_languages": null}, "macro.zendesk.default__regex_extract": {"name": "default__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.default__regex_extract", "macro_sql": "{% macro default__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977014, "supported_languages": null}, "macro.zendesk.bigquery__regex_extract": {"name": "bigquery__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.bigquery__regex_extract", "macro_sql": "{% macro bigquery__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977202, "supported_languages": null}, "macro.zendesk.snowflake__regex_extract": {"name": "snowflake__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.snowflake__regex_extract", "macro_sql": "{% macro snowflake__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977466, "supported_languages": null}, "macro.zendesk.postgres__regex_extract": {"name": "postgres__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.postgres__regex_extract", "macro_sql": "{% macro postgres__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n (regexp_matches({{ string }}, {{ regex }}))[1]\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9778008, "supported_languages": null}, "macro.zendesk.redshift__regex_extract": 
{"name": "redshift__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.redshift__regex_extract", "macro_sql": "{% macro redshift__regex_extract(string, day) %}\n\n {% set regex = '\"' ~ day ~ '\"' ~ ':\\\\\\{([^\\\\\\}]*)\\\\\\}' -%}\n\n '{' || REGEXP_SUBSTR({{ string }}, '{{ regex }}', 1, 1, 'e') || '}'\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9780962, "supported_languages": null}, "macro.zendesk.spark__regex_extract": {"name": "spark__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.spark__regex_extract", "macro_sql": "{% macro spark__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" | replace(\"{\", \"\\\\\\{\") | replace(\"}\", \"\\\\\\}\") %}\n regexp_extract({{ string }}, {{ regex }}, 1)\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.978452, "supported_languages": null}, "macro.zendesk.coalesce_cast": {"name": "coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.coalesce_cast", "macro_sql": "{% macro coalesce_cast(column_list, datatype) -%}\n {{ return(adapter.dispatch('coalesce_cast', 'zendesk')(column_list, datatype)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__coalesce_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.978838, "supported_languages": null}, "macro.zendesk.default__coalesce_cast": {"name": "default__coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.default__coalesce_cast", "macro_sql": "{% macro default__coalesce_cast(column_list, datatype) %}\n coalesce(\n {%- for column in column_list %}\n cast({{ column }} as {{ datatype }})\n {%- if not loop.last -%},{%- endif -%}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.979129, "supported_languages": null}, "macro.zendesk.clean_schedule": {"name": "clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", "original_file_path": "macros/clean_schedule.sql", "unique_id": "macro.zendesk.clean_schedule", "macro_sql": "{% macro clean_schedule(column_name) -%}\n {{ return(adapter.dispatch('clean_schedule', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__clean_schedule"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9794052, "supported_languages": null}, "macro.zendesk.default__clean_schedule": {"name": "default__clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", 
"original_file_path": "macros/clean_schedule.sql", "unique_id": "macro.zendesk.default__clean_schedule", "macro_sql": "{% macro default__clean_schedule(column_name) -%}\n replace(replace(replace(replace(cast({{ column_name }} as {{ dbt.type_string() }}), '{', ''), '}', ''), '\"', ''), ' ', '')\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.97955, "supported_languages": null}, "macro.zendesk.count_tokens": {"name": "count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.count_tokens", "macro_sql": "{% macro count_tokens(column_name) -%}\n {{ return(adapter.dispatch('count_tokens', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__count_tokens"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.979813, "supported_languages": null}, "macro.zendesk.default__count_tokens": {"name": "default__count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.default__count_tokens", "macro_sql": "{% macro default__count_tokens(column_name) %}\n {{ dbt.length(column_name) }} / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9799511, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9805791, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": 
dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.981683, "supported_languages": null}, "macro.zendesk_source.get_audit_log_columns": {"name": "get_audit_log_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_audit_log_columns.sql", "original_file_path": "macros/get_audit_log_columns.sql", "unique_id": "macro.zendesk_source.get_audit_log_columns", "macro_sql": "{% macro get_audit_log_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"action\", \"datatype\": dbt.type_string()},\n {\"name\": \"actor_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"change_description\", \"datatype\": dbt.type_string()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"source_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"source_label\", \"datatype\": dbt.type_string()},\n {\"name\": \"source_type\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.982717, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9837399, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": 
dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.985162, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.985893, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.986357, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": {"name": "get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", 
\"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.987481, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9889228, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.989802, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", 
\"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.990656, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.994375, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": 
"macros/get_ticket_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9995441, "supported_languages": null}, 
"macro.zendesk_source.get_ticket_field_history_columns": {"name": "get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.000344, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.000938, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_tickets\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.0023942, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.004552, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_file_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", 
\"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.006764, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.409955, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "postgres", "schema": 
"zz_zendesk_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "b30a06ff7e3d392b2fdfa6b5f34633f6c7f8e018e31eef64fcdf2eeaffcae18a"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.427278, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\ndev as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\nfinal as (\n select \n prod.ticket_id as prod_ticket_id,\n dev.ticket_id as dev_ticket_id,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "bdad5490a4a975665c4b658101726f92c08755dd96f6372d8606b47e60fe29d4"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": 
"", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.431509, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select \n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nprod_not_in_dev as (\n -- rows from prod not found in dev\n select * from prod\n except distinct\n select * from dev\n),\n\ndev_not_in_prod as (\n -- rows from dev not found in prod\n select * from dev\n except distinct\n select * from prod\n),\n\nfinal as (\n select\n *,\n 'from prod' as source\n from prod_not_in_dev\n\n union all -- union since we only care if rows are produced\n\n select\n *,\n 'from dev' as source\n from dev_not_in_prod\n)\n\nselect *\nfrom final\n{{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.metrics_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "metrics_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/metrics_count_match.sql", "original_file_path": "tests/integrity/metrics_count_match.sql", "unique_id": "test.zendesk_integration_tests.metrics_count_match", "fqn": ["zendesk_integration_tests", "integrity", "metrics_count_match"], "alias": "metrics_count_match", "checksum": {"name": "sha256", "checksum": "a1b9b09d680906335f534a5707924cdd7975615c0f3192a51e790183e4625724"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.435719, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- check that all the tickets are accounted for in the metrics\nwith stg_count as (\n select\n count(*) as stg_ticket_count\n from {{ ref('stg_zendesk__ticket') }}\n),\n\nmetric_count as (\n select\n count(*) as metric_ticket_count\n from {{ ref('zendesk__ticket_metrics') }}\n)\n\nselect *\nfrom 
stg_count\njoin metric_count\n on stg_ticket_count != metric_ticket_count", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_metrics_parity": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_metrics_parity", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_metrics_parity.sql", "original_file_path": "tests/integrity/sla_metrics_parity.sql", "unique_id": "test.zendesk_integration_tests.sla_metrics_parity", "fqn": ["zendesk_integration_tests", "integrity", "sla_metrics_parity"], "alias": "sla_metrics_parity", "checksum": {"name": "sha256", "checksum": "d18407ef45d1ce6b2d4eeaca9286dfb8b3b1db85021e3fd69701fb0c33138675"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.439052, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n/*\nThis test is to ensure the sla_elapsed_time from zendesk__sla_policies matches the corresponding time in zendesk__ticket_metrics.\n*/\n\nwith dev_slas as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n where in_business_hours\n\n), dev_metrics as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n\n), dev_compare as (\n select \n dev_slas.ticket_id,\n dev_slas.metric,\n cast(dev_slas.sla_elapsed_time as {{ dbt.type_int() }}) as time_from_slas,\n case when metric = 'agent_work_time' then dev_metrics.agent_work_time_in_business_minutes\n when metric = 'requester_wait_time' then dev_metrics.requester_wait_time_in_business_minutes\n when metric = 'first_reply_time' then dev_metrics.first_reply_time_business_minutes\n end as time_from_metrics\n from dev_slas\n left join dev_metrics\n on dev_metrics.ticket_id = dev_slas.ticket_id\n)\n\nselect *\nfrom dev_compare\nwhere abs(time_from_slas - time_from_metrics) >= 5\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": 
"test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.443585, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n ticket_metrics.first_reply_time_business_minutes,\n sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.447191, "relation_name": null, "raw_code": "{{ config(\n 
tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.5301702, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], 
"seed.zendesk_integration_tests.brand_data": [{"database": "postgres", "schema": "zz_zendesk", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1728492760.537546, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' 
}}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.539969, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.5429592, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", 
"model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", 
"model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", "source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.zendesk__document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped", "model.zendesk.int_zendesk__ticket_document"], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.int_zendesk__ticket_comment_document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": 
["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_history", "model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], 
"model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["source.zendesk_source.zendesk.audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], 
"test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "source.zendesk_source.zendesk.audit_log": [], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], "source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": [], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], 
"seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": ["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__calendar_spine": 
["model.zendesk.int_zendesk__field_calendar_spine"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.zendesk__document": [], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__ticket_historical_group": 
["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_holiday", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", 
"test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk.int_zendesk__schedule_history"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__calendar_spine", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": 
["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "source.zendesk_source.zendesk.audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "source.zendesk_source.zendesk.ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], 
"source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}, "unit_tests": {}} \ No newline at end of file diff --git a/docs/run_results.json b/docs/run_results.json deleted file mode 100644 index 8506eaa7..00000000 --- a/docs/run_results.json +++ /dev/null @@ -1 +0,0 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v6.json", "dbt_version": "1.8.3", "generated_at": "2024-09-03T18:15:38.784874Z", "invocation_id": "ab89e8de-0760-4824-96db-0e8bd67c9f64", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.051976Z", "completed_at": "2024-09-03T18:15:33.664413Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.664698Z", "completed_at": "2024-09-03T18:15:33.664712Z"}], "thread_id": "Thread-5", "execution_time": 0.707880973815918, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"group_data\" as group_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.060129Z", "completed_at": "2024-09-03T18:15:33.682619Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.682912Z", "completed_at": "2024-09-03T18:15:33.682918Z"}], "thread_id": "Thread-7", "execution_time": 0.721423864364624, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zz_zendesk\".\"organization_data\" as organization_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.047802Z", "completed_at": "2024-09-03T18:15:33.729349Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.729937Z", "completed_at": "2024-09-03T18:15:33.729941Z"}], "thread_id": "Thread-4", "execution_time": 0.7912807464599609, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zz_zendesk\".\"domain_name_data\" as domain_name_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.043479Z", "completed_at": "2024-09-03T18:15:33.727344Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.729615Z", "completed_at": "2024-09-03T18:15:33.729620Z"}], 
"thread_id": "Thread-3", "execution_time": 0.792304277420044, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zz_zendesk\".\"daylight_time_data\" as daylight_time_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.064079Z", "completed_at": "2024-09-03T18:15:33.732360Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.732613Z", "completed_at": "2024-09-03T18:15:33.732618Z"}], "thread_id": "Thread-8", "execution_time": 0.7750880718231201, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_holiday_data\" as schedule_holiday_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.020572Z", "completed_at": "2024-09-03T18:15:33.731734Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.732017Z", "completed_at": "2024-09-03T18:15:33.732022Z"}], "thread_id": "Thread-2", "execution_time": 0.7954747676849365, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"brand_data\" as brand_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.056028Z", "completed_at": "2024-09-03T18:15:33.726377Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:33.726716Z", "completed_at": "2024-09-03T18:15:33.726723Z"}], "thread_id": "Thread-6", "execution_time": 0.7926380634307861, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"organization_tag_data\" as organization_tag_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": 
"2024-09-03T18:15:33.712480Z", "completed_at": "2024-09-03T18:15:34.302258Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.302497Z", "completed_at": "2024-09-03T18:15:34.302504Z"}], "thread_id": "Thread-5", "execution_time": 0.6417891979217529, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_data\" as schedule_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.742098Z", "completed_at": "2024-09-03T18:15:34.334917Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.335161Z", "completed_at": "2024-09-03T18:15:34.335165Z"}], "thread_id": "Thread-7", "execution_time": 0.641913652420044, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_comment_data\" as ticket_comment_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.857793Z", "completed_at": "2024-09-03T18:15:34.392928Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.393184Z", "completed_at": "2024-09-03T18:15:34.393190Z"}], "thread_id": "Thread-1", "execution_time": 0.5925579071044922, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zz_zendesk\".\"time_zone_data\" as time_zone_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.450326Z", "completed_at": "2024-09-03T18:15:34.456607Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.457126Z", "completed_at": "2024-09-03T18:15:34.457131Z"}], "thread_id": "Thread-1", "execution_time": 0.007464170455932617, "adapter_response": {}, "message": null, "failures": null, "unique_id": "operation.zendesk.zendesk-on-run-start-0", "compiled": true, "compiled_code": "\n\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.458588Z", "completed_at": "2024-09-03T18:15:34.460257Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.460494Z", "completed_at": "2024-09-03T18:15:34.460498Z"}], "thread_id": "Thread-1", "execution_time": 0.002452850341796875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "compiled": null, "compiled_code": null, "relation_name": 
null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.461630Z", "completed_at": "2024-09-03T18:15:34.463203Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.463430Z", "completed_at": "2024-09-03T18:15:34.463434Z"}], "thread_id": "Thread-1", "execution_time": 0.0023450851440429688, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.464536Z", "completed_at": "2024-09-03T18:15:34.466046Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.466272Z", "completed_at": "2024-09-03T18:15:34.466276Z"}], "thread_id": "Thread-1", "execution_time": 0.0022420883178710938, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.domain_name_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.467385Z", "completed_at": "2024-09-03T18:15:34.468923Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.469147Z", "completed_at": "2024-09-03T18:15:34.469151Z"}], "thread_id": "Thread-1", "execution_time": 0.0023069381713867188, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.group_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.470181Z", "completed_at": "2024-09-03T18:15:34.472221Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.472435Z", "completed_at": "2024-09-03T18:15:34.472439Z"}], "thread_id": "Thread-1", "execution_time": 0.002755880355834961, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.473474Z", "completed_at": "2024-09-03T18:15:34.474885Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.475097Z", "completed_at": "2024-09-03T18:15:34.475101Z"}], "thread_id": "Thread-1", "execution_time": 0.002126932144165039, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.476116Z", "completed_at": "2024-09-03T18:15:34.477480Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.477684Z", "completed_at": "2024-09-03T18:15:34.477688Z"}], "thread_id": "Thread-1", "execution_time": 0.002064228057861328, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.478728Z", "completed_at": "2024-09-03T18:15:34.480023Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.480438Z", "completed_at": "2024-09-03T18:15:34.480441Z"}], "thread_id": "Thread-1", "execution_time": 0.0021982192993164062, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "compiled": 
null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.799308Z", "completed_at": "2024-09-03T18:15:34.430197Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.430480Z", "completed_at": "2024-09-03T18:15:34.430486Z"}], "thread_id": "Thread-4", "execution_time": 0.6849889755249023, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\" as ticket_field_history_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.482048Z", "completed_at": "2024-09-03T18:15:34.483462Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.483985Z", "completed_at": "2024-09-03T18:15:34.483989Z"}], "thread_id": "Thread-1", "execution_time": 0.0029938220977783203, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.484755Z", "completed_at": "2024-09-03T18:15:34.486731Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.487183Z", "completed_at": "2024-09-03T18:15:34.487187Z"}], "thread_id": "Thread-4", "execution_time": 0.0034973621368408203, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.487925Z", "completed_at": "2024-09-03T18:15:34.489228Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.489648Z", "completed_at": "2024-09-03T18:15:34.489651Z"}], "thread_id": "Thread-1", "execution_time": 0.0027408599853515625, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.490380Z", "completed_at": "2024-09-03T18:15:34.491651Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.492073Z", "completed_at": "2024-09-03T18:15:34.492077Z"}], "thread_id": "Thread-4", "execution_time": 0.002691984176635742, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.492802Z", "completed_at": "2024-09-03T18:15:34.494068Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.494515Z", "completed_at": "2024-09-03T18:15:34.494519Z"}], "thread_id": "Thread-1", "execution_time": 0.0027070045471191406, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.495213Z", "completed_at": 
"2024-09-03T18:15:34.496452Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.496870Z", "completed_at": "2024-09-03T18:15:34.496874Z"}], "thread_id": "Thread-4", "execution_time": 0.0026230812072753906, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.497562Z", "completed_at": "2024-09-03T18:15:34.499408Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.499831Z", "completed_at": "2024-09-03T18:15:34.499835Z"}], "thread_id": "Thread-1", "execution_time": 0.003220081329345703, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.time_zone_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.500743Z", "completed_at": "2024-09-03T18:15:34.502004Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.502453Z", "completed_at": "2024-09-03T18:15:34.502457Z"}], "thread_id": "Thread-4", "execution_time": 0.0028810501098632812, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.851168Z", "completed_at": "2024-09-03T18:15:34.439530Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.440039Z", "completed_at": "2024-09-03T18:15:34.440044Z"}], "thread_id": "Thread-6", "execution_time": 0.7039308547973633, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n \"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_data\" as ticket_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.503453Z", "completed_at": "2024-09-03T18:15:34.504718Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.505379Z", "completed_at": "2024-09-03T18:15:34.505382Z"}], "thread_id": "Thread-1", "execution_time": 0.003222942352294922, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.840577Z", "completed_at": "2024-09-03T18:15:34.456875Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.457553Z", "completed_at": "2024-09-03T18:15:34.457557Z"}], 
"thread_id": "Thread-8", "execution_time": 0.7179090976715088, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.847473Z", "completed_at": "2024-09-03T18:15:34.439778Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.440296Z", "completed_at": "2024-09-03T18:15:34.440300Z"}], "thread_id": "Thread-2", "execution_time": 0.7181539535522461, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_tag_data\" as ticket_tag_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:33.835962Z", "completed_at": "2024-09-03T18:15:34.438880Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.439187Z", "completed_at": "2024-09-03T18:15:34.439193Z"}], "thread_id": "Thread-3", "execution_time": 0.7244360446929932, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_form_history_data\" as ticket_form_history_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.354933Z", "completed_at": "2024-09-03T18:15:34.877220Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.877983Z", "completed_at": "2024-09-03T18:15:34.877999Z"}], "thread_id": "Thread-5", "execution_time": 0.5833866596221924, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"user_tag_data\" as user_tag_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.384709Z", "completed_at": "2024-09-03T18:15:34.961785Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:34.962308Z", "completed_at": "2024-09-03T18:15:34.962324Z"}], "thread_id": "Thread-7", "execution_time": 0.636634111404419, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "compiled": true, 
"compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zz_zendesk\".\"user_data\" as user_table", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.506712Z", "completed_at": "2024-09-03T18:15:35.093064Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.093295Z", "completed_at": "2024-09-03T18:15:35.093302Z"}], "thread_id": "Thread-4", "execution_time": 0.6608479022979736, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.511541Z", "completed_at": "2024-09-03T18:15:35.115995Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.116232Z", "completed_at": "2024-09-03T18:15:35.116237Z"}], "thread_id": "Thread-6", "execution_time": 0.6837408542633057, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.533641Z", "completed_at": "2024-09-03T18:15:35.190712Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.196501Z", "completed_at": "2024-09-03T18:15:35.196506Z"}], "thread_id": "Thread-3", "execution_time": 0.7370569705963135, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.524655Z", "completed_at": "2024-09-03T18:15:35.210169Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.210397Z", "completed_at": "2024-09-03T18:15:35.210402Z"}], "thread_id": "Thread-8", "execution_time": 0.7383081912994385, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.529222Z", "completed_at": "2024-09-03T18:15:35.210671Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.210894Z", "completed_at": "2024-09-03T18:15:35.210898Z"}], "thread_id": "Thread-2", "execution_time": 0.7402949333190918, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.517244Z", "completed_at": "2024-09-03T18:15:35.211138Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.211338Z", "completed_at": "2024-09-03T18:15:35.211341Z"}], "thread_id": "Thread-1", "execution_time": 0.7533109188079834, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:34.941497Z", "completed_at": "2024-09-03T18:15:35.558150Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.558642Z", "completed_at": "2024-09-03T18:15:35.558657Z"}], "thread_id": "Thread-5", "execution_time": 0.6668610572814941, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.023529Z", "completed_at": "2024-09-03T18:15:35.751712Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.752249Z", "completed_at": "2024-09-03T18:15:35.752264Z"}], "thread_id": "Thread-7", "execution_time": 0.8128046989440918, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.190943Z", "completed_at": "2024-09-03T18:15:35.836032Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.837249Z", "completed_at": "2024-09-03T18:15:35.837255Z"}], "thread_id": "Thread-6", "execution_time": 0.7019541263580322, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.167334Z", "completed_at": "2024-09-03T18:15:35.837924Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.838584Z", "completed_at": "2024-09-03T18:15:35.838589Z"}], "thread_id": "Thread-4", "execution_time": 0.7260079383850098, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.912451Z", "completed_at": "2024-09-03T18:15:35.939075Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.939557Z", "completed_at": "2024-09-03T18:15:35.939560Z"}], "thread_id": "Thread-4", "execution_time": 0.04499077796936035, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.940880Z", "completed_at": "2024-09-03T18:15:35.945071Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.945281Z", "completed_at": "2024-09-03T18:15:35.945285Z"}], "thread_id": "Thread-4", "execution_time": 0.004884004592895508, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.946309Z", "completed_at": "2024-09-03T18:15:35.949059Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.949262Z", "completed_at": "2024-09-03T18:15:35.949266Z"}], "thread_id": "Thread-4", "execution_time": 
0.003454923629760742, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.950228Z", "completed_at": "2024-09-03T18:15:35.953311Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.953511Z", "completed_at": "2024-09-03T18:15:35.953515Z"}], "thread_id": "Thread-4", "execution_time": 0.0037322044372558594, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by organization_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.954478Z", "completed_at": "2024-09-03T18:15:35.957074Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.957272Z", "completed_at": "2024-09-03T18:15:35.957276Z"}], "thread_id": "Thread-4", "execution_time": 0.003237009048461914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.958258Z", "completed_at": "2024-09-03T18:15:35.960864Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.961061Z", "completed_at": "2024-09-03T18:15:35.961065Z"}], "thread_id": "Thread-4", "execution_time": 0.003264904022216797, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.962026Z", "completed_at": "2024-09-03T18:15:35.968143Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.968357Z", "completed_at": "2024-09-03T18:15:35.968361Z"}], "thread_id": "Thread-4", "execution_time": 0.006788730621337891, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.969340Z", "completed_at": "2024-09-03T18:15:35.972045Z"}, {"name": 
"execute", "started_at": "2024-09-03T18:15:35.972257Z", "completed_at": "2024-09-03T18:15:35.972260Z"}], "thread_id": "Thread-4", "execution_time": 0.003367900848388672, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.973208Z", "completed_at": "2024-09-03T18:15:35.976611Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.976813Z", "completed_at": "2024-09-03T18:15:35.976817Z"}], "thread_id": "Thread-4", "execution_time": 0.004049777984619141, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.977829Z", "completed_at": "2024-09-03T18:15:35.980382Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.980580Z", "completed_at": "2024-09-03T18:15:35.980584Z"}], "thread_id": "Thread-4", "execution_time": 0.0032529830932617188, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.981521Z", "completed_at": "2024-09-03T18:15:35.986519Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.986727Z", "completed_at": "2024-09-03T18:15:35.986731Z"}], "thread_id": "Thread-4", "execution_time": 0.00564885139465332, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags 
this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.278511Z", "completed_at": "2024-09-03T18:15:35.894978Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.900875Z", "completed_at": "2024-09-03T18:15:35.900881Z"}], "thread_id": "Thread-2", "execution_time": 0.7366960048675537, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.271572Z", "completed_at": "2024-09-03T18:15:35.939326Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.939958Z", "completed_at": "2024-09-03T18:15:35.939962Z"}], "thread_id": "Thread-8", "execution_time": 0.7662768363952637, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.265387Z", "completed_at": "2024-09-03T18:15:35.921156Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.938412Z", "completed_at": "2024-09-03T18:15:35.938418Z"}], "thread_id": "Thread-3", "execution_time": 0.7711751461029053, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.283563Z", "completed_at": "2024-09-03T18:15:35.937951Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:35.938694Z", "completed_at": "2024-09-03T18:15:35.938698Z"}], "thread_id": "Thread-1", "execution_time": 0.7607150077819824, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.014297Z", "completed_at": "2024-09-03T18:15:36.047609Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.047837Z", "completed_at": "2024-09-03T18:15:36.047842Z"}], "thread_id": "Thread-2", "execution_time": 0.03402376174926758, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.030579Z", "completed_at": "2024-09-03T18:15:36.061222Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.061462Z", "completed_at": "2024-09-03T18:15:36.061467Z"}], "thread_id": "Thread-8", "execution_time": 0.03137993812561035, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.038564Z", "completed_at": "2024-09-03T18:15:36.114842Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.115103Z", "completed_at": "2024-09-03T18:15:36.115110Z"}], "thread_id": "Thread-1", "execution_time": 0.07703018188476562, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.062462Z", "completed_at": "2024-09-03T18:15:36.138749Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.145530Z", "completed_at": "2024-09-03T18:15:36.145537Z"}], "thread_id": "Thread-8", "execution_time": 0.0835568904876709, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.034997Z", "completed_at": "2024-09-03T18:15:36.145289Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.154306Z", "completed_at": "2024-09-03T18:15:36.154310Z"}], "thread_id": "Thread-3", "execution_time": 0.11978816986083984, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.055488Z", "completed_at": "2024-09-03T18:15:36.154647Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.155420Z", "completed_at": "2024-09-03T18:15:36.155424Z"}], "thread_id": "Thread-2", "execution_time": 0.10040497779846191, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select 
cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then \n current_timestamp::timestamp\n else updated_at end) as date) as open_until\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.987692Z", "completed_at": "2024-09-03T18:15:36.156004Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.156891Z", "completed_at": "2024-09-03T18:15:36.156895Z"}], "thread_id": "Thread-4", "execution_time": 0.16964197158813477, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__schedule_spine", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1671\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n 
cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(\n current_timestamp::timestamp\n as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n schedule_holiday.holiday_end_date_at + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_week_start,\n cast(cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_end_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n (\n (\n ((holiday_start_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_start_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_start_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n (\n (\n ((holiday_end_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_end_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_end_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, 
\n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n holiday_week_end + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non-holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps, we check to see if the valid_from and previous valid_until are right next to one another. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n valid_until + ((interval '1 hour') * (2))\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now let's prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.157606Z", "completed_at": "2024-09-03T18:15:36.166595Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.166809Z", "completed_at": "2024-09-03T18:15:36.166814Z"}], "thread_id": "Thread-8", "execution_time": 0.01108098030090332, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__updates", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n 
from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.163981Z", "completed_at": "2024-09-03T18:15:36.167236Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.167464Z", "completed_at": "2024-09-03T18:15:36.167468Z"}], "thread_id": "Thread-2", "execution_time": 0.010128021240234375, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.160358Z", "completed_at": "2024-09-03T18:15:36.167942Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.168135Z", "completed_at": "2024-09-03T18:15:36.168138Z"}], "thread_id": "Thread-3", "execution_time": 0.011996984481811523, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.169166Z", "completed_at": "2024-09-03T18:15:36.172619Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.175452Z", "completed_at": "2024-09-03T18:15:36.175457Z"}], "thread_id": "Thread-4", "execution_time": 0.006746053695678711, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__assignee_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 
2\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.173225Z", "completed_at": "2024-09-03T18:15:36.184964Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.185614Z", "completed_at": "2024-09-03T18:15:36.185619Z"}], "thread_id": "Thread-8", "execution_time": 0.013932228088378906, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.176032Z", "completed_at": "2024-09-03T18:15:36.185175Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.185976Z", "completed_at": "2024-09-03T18:15:36.185982Z"}], "thread_id": "Thread-2", "execution_time": 0.013846874237060547, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. 
If your company uses other SLA metrics and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, \n current_timestamp::timestamp\n) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.180980Z", "completed_at": "2024-09-03T18:15:36.186299Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.189228Z", "completed_at": "2024-09-03T18:15:36.189232Z"}], "thread_id": "Thread-3", "execution_time": 0.01648688316345215, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at 
desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - (coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.186495Z", "completed_at": "2024-09-03T18:15:36.190651Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.193516Z", "completed_at": "2024-09-03T18:15:36.193521Z"}], "thread_id": "Thread-4", "execution_time": 0.00894618034362793, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.191241Z", "completed_at": "2024-09-03T18:15:36.201290Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.201704Z", "completed_at": "2024-09-03T18:15:36.201709Z"}], "thread_id": "Thread-8", "execution_time": 0.012079000473022461, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') 
\n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.193890Z", "completed_at": "2024-09-03T18:15:36.201495Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.202089Z", "completed_at": "2024-09-03T18:15:36.202093Z"}], 
"thread_id": "Thread-2", "execution_time": 0.012202262878417969, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.196775Z", "completed_at": "2024-09-03T18:15:36.202489Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.202812Z", "completed_at": "2024-09-03T18:15:36.202816Z"}], "thread_id": "Thread-3", "execution_time": 0.011848926544189453, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.205463Z", "completed_at": "2024-09-03T18:15:36.218744Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.218991Z", "completed_at": "2024-09-03T18:15:36.218997Z"}], "thread_id": "Thread-3", "execution_time": 0.015223026275634766, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.208355Z", "completed_at": "2024-09-03T18:15:36.223649Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.223884Z", "completed_at": "2024-09-03T18:15:36.223887Z"}], "thread_id": "Thread-4", "execution_time": 0.01967906951904297, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.220174Z", "completed_at": "2024-09-03T18:15:36.224921Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.225359Z", "completed_at": "2024-09-03T18:15:36.225363Z"}], "thread_id": "Thread-3", "execution_time": 0.0056629180908203125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.225961Z", 
"completed_at": "2024-09-03T18:15:36.229068Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.229276Z", "completed_at": "2024-09-03T18:15:36.229280Z"}], "thread_id": "Thread-8", "execution_time": 0.004207134246826172, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.610858Z", "completed_at": "2024-09-03T18:15:36.336273Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.336588Z", "completed_at": "2024-09-03T18:15:36.336596Z"}], "thread_id": "Thread-5", "execution_time": 0.7943947315216064, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.412590Z", "completed_at": "2024-09-03T18:15:36.420027Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.420660Z", "completed_at": "2024-09-03T18:15:36.420666Z"}], "thread_id": "Thread-4", "execution_time": 0.014436960220336914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.407905Z", "completed_at": "2024-09-03T18:15:36.420378Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.421175Z", "completed_at": "2024-09-03T18:15:36.421179Z"}], "thread_id": "Thread-2", "execution_time": 0.015795230865478516, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.838865Z", "completed_at": "2024-09-03T18:15:36.483436Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.483783Z", "completed_at": "2024-09-03T18:15:36.483789Z"}], "thread_id": "Thread-7", "execution_time": 0.7014970779418945, "adapter_response": {}, "message": null, "failures": null, "unique_id": 
"model.zendesk_source.stg_zendesk__user_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.116140Z", "completed_at": "2024-09-03T18:15:36.647831Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.648210Z", "completed_at": "2024-09-03T18:15:36.648221Z"}], "thread_id": "Thread-1", "execution_time": 0.5332798957824707, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n \n current_timestamp::timestamp\n + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:35.906011Z", 
"completed_at": "2024-09-03T18:15:36.627484Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.627833Z", "completed_at": "2024-09-03T18:15:36.627841Z"}], "thread_id": "Thread-6", "execution_time": 0.8786129951477051, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.664035Z", "completed_at": "2024-09-03T18:15:36.734246Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.776904Z", "completed_at": "2024-09-03T18:15:36.776909Z"}], "thread_id": "Thread-5", "execution_time": 0.12550020217895508, "adapter_response": {}, "message": null, 
"failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start 
date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n 
weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_agent_work_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.674882Z", "completed_at": "2024-09-03T18:15:36.776613Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.777568Z", "completed_at": "2024-09-03T18:15:36.777572Z"}], "thread_id": "Thread-3", "execution_time": 0.1251850128173828, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', 
(cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n 
raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes\n from weekly_period_requester_wait_time\n left join schedule\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, 
schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.797230Z", "completed_at": "2024-09-03T18:15:36.808173Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.809003Z", "completed_at": "2024-09-03T18:15:36.809011Z"}], "thread_id": "Thread-7", "execution_time": 0.02866983413696289, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__user_aggregates", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.792290Z", "completed_at": "2024-09-03T18:15:36.808494Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.809875Z", "completed_at": "2024-09-03T18:15:36.809879Z"}], "thread_id": "Thread-5", "execution_time": 0.029740095138549805, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_document", "compiled": true, "compiled_code": "\n\nwith tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as user_name,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as created_by,\n tickets.created_at AS created_on,\n \n coalesce(\n cast(tickets.status as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as status,\n \n coalesce(\n cast(tickets.priority as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n '# Ticket : ' || ticket_name || '\\n\\n' || 'Created By : ' || user_name || ' (' || created_by || ')\\n' || 'Created On : ' || created_on || '\\n' || 'Status : ' || status || '\\n' || 'Priority : ' || priority as ticket_markdown\n from 
ticket_details\n)\n\nselect \n *,\n \n \n\n length(\n ticket_markdown\n ) / 4\n as ticket_tokens\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.782523Z", "completed_at": "2024-09-03T18:15:36.808786Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.810254Z", "completed_at": "2024-09-03T18:15:36.810258Z"}], "thread_id": "Thread-1", "execution_time": 0.030351877212524414, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_comment_document", "compiled": true, "compiled_code": "\n\nwith ticket_comments as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_email,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n '### message from ' || commenter_name || ' (' || commenter_email || ')\\n' || '##### sent @ ' || comment_time || '\\n' || comment_body as TEXT)\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n \n \n\n length(\n comment_markdown\n ) / 4\n as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > 5000 then left(comment_markdown, 5000 * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > 5000 then 5000\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.800060Z", "completed_at": "2024-09-03T18:15:36.809426Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.811114Z", "completed_at": "2024-09-03T18:15:36.811117Z"}], "thread_id": "Thread-8", "execution_time": 0.03054213523864746, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.802544Z", "completed_at": "2024-09-03T18:15:36.809656Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.812165Z", "completed_at": "2024-09-03T18:15:36.812168Z"}], "thread_id": "Thread-3", "execution_time": 0.03138613700866699, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "compiled": true, 
"compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.818808Z", "completed_at": "2024-09-03T18:15:36.865454Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.865693Z", "completed_at": "2024-09-03T18:15:36.865699Z"}], "thread_id": "Thread-4", "execution_time": 0.05210709571838379, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 
day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n 
cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n date_trunc('week', sla_applied_at) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) 
as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.830781Z", "completed_at": "2024-09-03T18:15:36.866813Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.867910Z", "completed_at": "2024-09-03T18:15:36.867914Z"}], "thread_id": "Thread-7", "execution_time": 0.054084062576293945, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_enriched", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. 
This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as 
submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.841029Z", "completed_at": "2024-09-03T18:15:36.867054Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.868741Z", "completed_at": "2024-09-03T18:15:36.868744Z"}], "thread_id": "Thread-1", "execution_time": 0.0544431209564209, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__comment_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by 
valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.835216Z", "completed_at": "2024-09-03T18:15:36.868535Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.870075Z", "completed_at": "2024-09-03T18:15:36.870078Z"}], "thread_id": "Thread-5", "execution_time": 0.05599617958068848, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped", "compiled": true, "compiled_code": "\n\nwith filtered_comment_documents as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast(\n ( floor(cumulative_length - 1) ) / nullif( ( 5000 ), 0)\n as integer) as chunk_index,\n \n string_agg(\n comment_markdown,\n '\\n\\n---\\n\\n'\n order by comment_time\n ) as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.876793Z", "completed_at": "2024-09-03T18:15:36.886153Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.917540Z", "completed_at": 
"2024-09-03T18:15:36.917546Z"}], "thread_id": "Thread-3", "execution_time": 0.05000495910644531, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, 
sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.890018Z", "completed_at": "2024-09-03T18:15:36.948585Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.950035Z", "completed_at": "2024-09-03T18:15:36.950040Z"}], "thread_id": "Thread-4", "execution_time": 0.06625795364379883, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.886556Z", "completed_at": "2024-09-03T18:15:36.949073Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.959122Z", "completed_at": "2024-09-03T18:15:36.959132Z"}], "thread_id": "Thread-6", "execution_time": 0.07565712928771973, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.944901Z", "completed_at": "2024-09-03T18:15:36.958859Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:36.960522Z", "completed_at": "2024-09-03T18:15:36.960526Z"}], "thread_id": "Thread-2", "execution_time": 0.0742490291595459, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__document", "compiled": true, "compiled_code": "\n\nwith ticket_document as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"\n\n), grouped as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"\n\n), final as (\n select\n cast(ticket_document.ticket_id as TEXT) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n ticket_document.ticket_markdown || '\\n\\n## COMMENTS\\n\\n' || grouped.comments_group_markdown\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__document\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.961220Z", "completed_at": "2024-09-03T18:15:36.994680Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.001359Z", "completed_at": "2024-09-03T18:15:37.001370Z"}], "thread_id": "Thread-8", "execution_time": 0.0518488883972168, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__sla_policies", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as 
(\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > \n current_timestamp::timestamp\n)\n then null\n else is_sla_breached\n end as 
is_sla_breach\nfrom all_slas_unioned", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.969460Z", "completed_at": "2024-09-03T18:15:37.027273Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.027510Z", "completed_at": "2024-09-03T18:15:37.027518Z"}], "thread_id": "Thread-3", "execution_time": 0.06662225723266602, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as 
first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and 
(end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n 
sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n 
cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from 
ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as 
start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when 
ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', 
(ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * 
(7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n 
ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', 
(ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as 
open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:37.023091Z", "completed_at": "2024-09-03T18:15:37.027907Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.028292Z", "completed_at": "2024-09-03T18:15:37.028296Z"}], "thread_id": "Thread-7", "execution_time": 0.005861043930053711, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:37.029408Z", "completed_at": "2024-09-03T18:15:37.036532Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.036981Z", "completed_at": "2024-09-03T18:15:37.036988Z"}], "thread_id": "Thread-7", "execution_time": 0.008167266845703125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_summary", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as 
unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_summary\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:37.033660Z", "completed_at": "2024-09-03T18:15:37.040350Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.040592Z", "completed_at": "2024-09-03T18:15:37.040597Z"}], "thread_id": "Thread-4", "execution_time": 0.007948875427246094, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:37.037379Z", "completed_at": "2024-09-03T18:15:37.041017Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.041243Z", "completed_at": "2024-09-03T18:15:37.041247Z"}], "thread_id": "Thread-6", "execution_time": 0.007887125015258789, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:36.950427Z", "completed_at": "2024-09-03T18:15:37.446140Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:37.446373Z", "completed_at": "2024-09-03T18:15:37.446380Z"}], "thread_id": "Thread-5", "execution_time": 0.5027801990509033, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "compiled": true, 
"compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:37.448165Z", "completed_at": "2024-09-03T18:15:38.085129Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:38.086296Z", "completed_at": "2024-09-03T18:15:38.086316Z"}], "thread_id": "Thread-8", "execution_time": 0.6996059417724609, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_scd", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from set_values\n) \n\nselect *\nfrom fill_values", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:38.151318Z", "completed_at": "2024-09-03T18:15:38.713614Z"}, {"name": "execute", 
"started_at": "2024-09-03T18:15:38.713991Z", "completed_at": "2024-09-03T18:15:38.714006Z"}], "thread_id": "Thread-3", "execution_time": 0.6182460784912109, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_field_history", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from 
set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-09-03T18:15:38.771115Z", "completed_at": "2024-09-03T18:15:38.781841Z"}, {"name": "execute", "started_at": "2024-09-03T18:15:38.782312Z", "completed_at": "2024-09-03T18:15:38.782321Z"}], "thread_id": "Thread-7", "execution_time": 0.011881828308105469, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_backlog", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_backlog\""}], "elapsed_time": 7.776034116744995, "args": {"source_freshness_run_project_hooks": false, "favor_state": false, "print": true, "write_json": true, "project_dir": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "static": false, "log_file_max_bytes": 10485760, "require_resource_names_without_spaces": false, "log_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests/logs", "use_colors": true, "static_parser": true, "log_format_file": "debug", "log_level": "info", "which": "generate", "defer": false, "log_format": "default", "invocation_command": "dbt docs generate", "log_level_file": "debug", "populate_cache": true, "compile": true, "quiet": false, "empty_catalog": false, "strict_mode": false, "select": [], "indirect_selection": "eager", "introspect": true, "require_explicit_package_overrides_for_builtin_materializations": true, "vars": {}, "exclude": [], "cache_selected_only": false, "partial_parse": true, "warn_error_options": {"include": [], "exclude": []}, "profiles_dir": "/Users/catherinefritz/.dbt", "printer_width": 80, "partial_parse_file_diff": true, "version_check": true, "use_colors_file": true, "send_anonymous_usage_stats": true, "show_resource_report": false, "macro_debugging": false, "enable_legacy_logger": false}} \ No newline at end of file diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 5097b507..8b88e4c5 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.18.0' profile: 'integration_tests' vars: - zendesk_schema: zendesk_integration_tests_50 + zendesk_schema: zz_zendesk #zendesk_integration_tests_50 zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" @@ -35,9 +35,8 @@ vars: # using_domain_names: false # using_user_tags: false # using_organization_tags: false - # fivetran_integrity_sla_first_reply_time_exclusion_tickets: (1,56,80) - # fivetran_consistency_ticket_metrics_exclusion_tickets: (11092,11093,11094) - # fivetran_integrity_sla_count_match_tickets: (76) + # fivetran_integrity_sla_metric_parity_exclusion_tickets: (56,80) + # fivetran_integrity_sla_first_reply_time_exclusion_tickets: (56,80) models: +schema: "zendesk_{{ var('directed_schema','dev') }}" From 246624b623ffa29f1a261fc2104e5f66e7b62a23 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 12:10:29 -0500 Subject: [PATCH 69/76] fix yml --- integration_tests/dbt_project.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 8b88e4c5..7820394d 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -6,7 +6,7 @@ version: '0.18.0' profile: 'integration_tests' vars: - zendesk_schema: zz_zendesk #zendesk_integration_tests_50 + zendesk_schema: zendesk_integration_tests_50 zendesk_source: zendesk_organization_identifier: "organization_data" zendesk_schedule_identifier: "schedule_data" From 888f1097233c81da730f6147d66f4883c57415e1 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 12:49:55 -0500 Subject: [PATCH 70/76] update changelog --- CHANGELOG.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index e1125081..802f1322 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,8 @@ - This feature is enabled by default, but can be easily turned off by setting `using_schedule_histories` to `false` in `dbt_project.yml`. - The `int_zendesk__schedule_spine` model is now enhanced to incorporate these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data. - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis, allowing for more accurate reporting. +### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details) +- Added the `stg_zendesk__audit_log` table for capturing schedule changes. This is disabled when setting `using_schedule_histories` to `false` in `dbt_project.yml`. ## New Features - Holiday support: Users can now choose to disable holiday tracking by setting `using_holidays` to `false` in `dbt_project.yml`. @@ -17,11 +19,14 @@ - `int_zendesk__schedule_timezones`: Merges schedule history with time zone shifts. - `int_zendesk__schedule_holidays`: Identifies and calculates holiday periods for each schedule. - Rebuilt logic in `int_zendesk__schedule_spine` to consolidate updates from the new intermediate models. +### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details) +- Updated the `stg_zendesk__schedule_holidays` model to allow users to disable holiday processing by setting `using_holidays` to `false`. +- Added field-level documentation for the `stg_zendesk__audit_log` table. ## Bug Fixes - Resolved a bug in the `int_zendesk__schedule_spine` model where users experienced large gaps in non-holiday periods. The updated logic addresses this issue. -## Under the Hood Improvements +## Under the Hood - Replaced instances of `dbt.date_trunc` with `dbt_date.week_start` to standardize week start dates to Sunday across all warehouses, since our schedule logic relies on consistent weeks. - Replaced the deprecated `dbt.current_timestamp_backcompat()` function with `dbt.current_timestamp()` to ensure all timestamps are captured in UTC. - Added seed data for `audit_log` to enhance integration testing capabilities. 
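For context on the `dbt_date.week_start` swap noted in the Under the Hood entries above, here is a minimal sketch of the pattern, assuming the `dbt_date` package is installed as a dependency; the column selection below is illustrative only, not the package's actual model code:

```sql
-- Illustrative only: anchor each audit-log change to the start of its week.
-- dbt.date_trunc('week', ...) compiles to each warehouse's native week convention,
-- which does not begin on the same weekday everywhere; dbt_date.week_start
-- standardizes the week start so week-based schedule logic lines up across targets.
select
    created_at,
    {{ dbt_date.week_start('created_at') }} as change_week_start
from {{ ref('stg_zendesk__audit_log') }}
```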
From af951b1ac3bcb081d2c85f7c1c5b6c290196da77 Mon Sep 17 00:00:00 2001 From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com> Date: Wed, 9 Oct 2024 17:38:37 -0500 Subject: [PATCH 71/76] address review comments and regen docs --- .buildkite/scripts/run_models.sh | 8 +++--- CHANGELOG.md | 26 +++++++++++++------ DECISIONLOG.md | 2 +- README.md | 6 +++-- dbt_project.yml | 8 +++--- docs/catalog.json | 2 +- docs/manifest.json | 2 +- integration_tests/dbt_project.yml | 1 + .../int_zendesk__schedule_history.sql | 2 +- .../int_zendesk__schedule_holiday.sql | 2 +- .../int_zendesk__schedule_spine.sql | 2 +- .../int_zendesk__schedule_timezones.sql | 2 +- 12 files changed, 39 insertions(+), 24 deletions(-) diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh index 36b437d3..eb16fa9a 100644 --- a/.buildkite/scripts/run_models.sh +++ b/.buildkite/scripts/run_models.sh @@ -20,8 +20,10 @@ dbt seed --target "$db" --full-refresh dbt run --target "$db" --full-refresh dbt run --target "$db" dbt test --target "$db" -dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_schedule_histories: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh -dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_schedule_histories: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" +dbt run --vars '{using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh +dbt test --target "$db" +dbt run --vars '{using_schedule_histories: true, using_holidays: false}' --target "$db" --full-refresh +dbt test --target "$db" dbt test --target "$db" -# dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file +dbt run-operation fivetran_utils.drop_schemas_automation --target "$db" \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 802f1322..c6179ad4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,16 +3,21 @@ ## Breaking Changes (Full refresh required after upgrading) ### Schedule Change Support -- Support for schedule changes has been added: - - Schedule changes are now extracted directly from the audit log, providing a view of schedule modifications over time. - - This feature is enabled by default, but can be easily turned off by setting `using_schedule_histories` to `false` in `dbt_project.yml`. - - The `int_zendesk__schedule_spine` model is now enhanced to incorporate these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data. - - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis, allowing for more accurate reporting. +- Support for schedule changes has been added. This feature is disabled by default, but can be enabled by setting the variable `using_schedule_histories` to `true` in `dbt_project.yml`: +```yml +vars: + using_schedule_histories: true +``` + - Schedule changes can now be extracted directly from the audit log, providing a view of schedule modifications over time. + - The `int_zendesk__schedule_spine` model now incorporates these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data.
+ - Note this only takes effect when `using_schedule_histories` is true. + - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis. ### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details) -- Added the `stg_zendesk__audit_log` table for capturing schedule changes. This is disabled when setting `using_schedule_histories` to `false` in `dbt_project.yml`. +- Introduced the `stg_zendesk__audit_log` table for capturing schedule changes from Zendesk's audit log. + - This model is disabled by default; to enable it, set the variable `using_schedule_histories` to `true` in `dbt_project.yml`. ## New Features -- Holiday support: Users can now choose to disable holiday tracking by setting `using_holidays` to `false` in `dbt_project.yml`. +- Holiday support: Users can now choose to disable holiday tracking by setting the variable `using_holidays` to `false` in `dbt_project.yml`. - New intermediate models have been introduced to streamline both the readability and maintainability: - `int_zendesk__timezone_daylight`: A utility model that maintains a record of daylight savings adjustments for each time zone. - `int_zendesk__schedule_history`: Captures a full history of schedule changes for each `schedule_id`. @@ -20,12 +25,17 @@ - `int_zendesk__schedule_holidays`: Identifies and calculates holiday periods for each schedule. - Rebuilt logic in `int_zendesk__schedule_spine` to consolidate updates from the new intermediate models. ### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details) -- Updated the `stg_zendesk__schedule_holidays` model to allow users to disable holiday processing by setting `using_holidays` to `false`. +- Updated the `stg_zendesk__schedule_holidays` model to allow users to disable holiday processing by setting the variable `using_holidays` to `false`. - Added field-level documentation for the `stg_zendesk__audit_log` table. ## Bug Fixes - Resolved a bug in the `int_zendesk__schedule_spine` model where users experienced large gaps in non-holiday periods. The updated logic addresses this issue. +## Decision log +- Added the following [DECISIONLOG](https://github.com/fivetran/dbt_zendesk/blob/main/DECISIONLOG.md) entries: + - Entry addressing how multiple schedule changes in a single day are handled. Only the last change of the day is captured to align with day-based downstream logic. + - Entry to clarify backfilling of schedule history. The most recent schedule is sourced from `stg_zendesk__schedule`, while historical changes are managed separately, allowing users to disable the history feature if needed. + ## Under the Hood - Replaced instances of `dbt.date_trunc` with `dbt_date.week_start` to standardize week start dates to Sunday across all warehouses, since our schedule logic relies on consistent weeks. - Replaced the deprecated `dbt.current_timestamp_backcompat()` function with `dbt.current_timestamp()` to ensure all timestamps are captured in UTC. diff --git a/DECISIONLOG.md b/DECISIONLOG.md index cdc4b4bf..4e4158b6 100644 --- a/DECISIONLOG.md +++ b/DECISIONLOG.md @@ -2,7 +2,7 @@ ## Schedule History ### Handling Multiple Schedule Changes in a Day -While integrating schedule changes from the audit_log source, we observed that multiple changes can occur on the same day, often when users are still finalizing a schedule.
To maintain clarity and align with our day-based downstream logic, we decided to capture only the last change made on any given day. If this approach proves insufficient for your use case, please submit a feature request to enable support for multiple changes within a single day. +While integrating schedule changes from the audit_log source, we observed that multiple changes can occur on the same day, often when users are still finalizing a schedule. To maintain clarity and align with our day-based downstream logic, we decided to capture only the last change made on any given day. If this approach proves insufficient for your use case, please submit a [feature request](https://github.com/fivetran/dbt_zendesk/issues/new/choose) to enable support for multiple changes within a single day. ### Backfilling the Schedule History Although the schedule history extracted from the audit log includes the most recent schedule, we exclude it in the `int_zendesk__schedule_history` model. Instead, we rely on the schedule from `stg_zendesk__schedule`, since it represents the live schedule. This approach also allows users who are not using schedule histories to easily disable the history feature. We join the live schedule with the schedule history model and bridge the valid_from and valid_until dates to maintain consistency. diff --git a/README.md b/README.md index 13554508..116c4f79 100644 --- a/README.md +++ b/README.md @@ -79,11 +79,13 @@ vars: zendesk_schema: your_schema_name ``` -### Step 4: Disable models for non-existent sources -This package takes into consideration that not every Zendesk Support account utilizes the `schedule`, `schedule_holiday`, `ticket_schedule` `daylight_time`, `time_zone`, `domain_name`, `user_tag`, `organization_tag`, or `ticket_form_history` features, and allows you to disable the corresponding functionality. By default, all variables' values are assumed to be `true`. Add variables for only the tables you want to disable: +### Step 4: Enable/Disable models for non-existent sources +This package takes into consideration that not every Zendesk Support account utilizes the `schedule`, `schedule_holiday`, `audit_log`, `domain_name`, `user_tag`, `organization_tag`, or `ticket_form_history` features, and allows you to disable the corresponding functionality. By default, all variables' values are assumed to be `true`, except for `using_schedule_histories`.
Add variables for only the tables you want to enable/disable: ```yml vars: + using_schedule_histories: True #Enable if you are using audit_logs for schedule histories using_schedules: False #Disable if you are not using schedules + using_holidays: False #Disable if you are not using schedule_holidays for holidays using_domain_names: False #Disable if you are not using domain names using_user_tags: False #Disable if you are not using user tags using_ticket_form_history: False #Disable if you are not using ticket form history diff --git a/dbt_project.yml b/dbt_project.yml index c086e9c6..7b17cc2b 100644 --- a/dbt_project.yml +++ b/dbt_project.yml @@ -13,10 +13,10 @@ models: intermediate: +schema: zendesk_intermediate +materialized: table - # int_zendesk__schedule_timezones: - # +materialized: ephemeral - # int_zendesk__schedule_holiday: - # +materialized: ephemeral + int_zendesk__schedule_timezones: + +materialized: ephemeral + int_zendesk__schedule_holiday: + +materialized: ephemeral reply_times: +materialized: ephemeral resolution_times: diff --git a/docs/catalog.json b/docs/catalog.json index 2f77cb11..0f0c4d03 100644 --- a/docs/catalog.json +++ b/docs/catalog.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T16:53:17.756232Z", "invocation_id": "ee1cfc0d-443e-4374-ad8a-25dc360a3746", "env": {}}, "nodes": {"seed.zendesk_integration_tests.audit_log_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.audit_log_data"}, "seed.zendesk_integration_tests.brand_data_postgres": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": 
"help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", 
"comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, "seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": 
{"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": 
"pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, 
"name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", 
"index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, 
"name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", 
"index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "bigint", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "timestamp without time zone", "index": 2, "name": "last_comment_added_at", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "bigint", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": 
{"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, "model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "text", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"valid_from": {"type": "date", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "bigint", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": 
"postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "text", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "text", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "boolean", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "numeric", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "timestamp without time zone", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "integer", "index": 15, "name": "week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time 
zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "integer", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "numeric", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "timestamp without time zone", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "timestamp without time zone", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "bigint", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "timestamp without time zone", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "timestamp without time zone", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "numeric", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "double precision", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "timestamp with time zone", "index": 20, 
"name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "timestamp without time zone", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "boolean", "index": 22, "name": "is_stale_sla_policy", "comment": null}, "is_sla_breached": {"type": "boolean", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "double precision", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "timestamp without time zone", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "bigint", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": 
null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", 
"index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "schedule_id_index": {"type": "bigint", "index": 2, "name": "schedule_id_index", "comment": null}, "start_time": {"type": "integer", "index": 3, "name": "start_time", "comment": null}, "end_time": {"type": "integer", "index": 4, "name": "end_time", "comment": null}, "valid_from": {"type": "date", "index": 5, "name": "valid_from", "comment": null}, "valid_until": {"type": "date", "index": 6, "name": "valid_until", "comment": null}, "day_of_week": {"type": "text", "index": 7, "name": "day_of_week", "comment": null}, "day_of_week_number": {"type": "integer", "index": 8, "name": "day_of_week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_history"}, "model.zendesk.int_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"holiday_name": {"type": "text", "index": 1, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "holiday_valid_from": {"type": "timestamp without time zone", "index": 3, "name": "holiday_valid_from", "comment": null}, "holiday_valid_until": {"type": "timestamp without time zone", "index": 4, "name": "holiday_valid_until", "comment": null}, "holiday_starting_sunday": {"type": "timestamp without time zone", "index": 5, "name": "holiday_starting_sunday", "comment": null}, "holiday_ending_sunday": {"type": "timestamp without time zone", "index": 6, "name": "holiday_ending_sunday", "comment": null}, "holiday_weeks_spanned": {"type": "integer", "index": 7, "name": "holiday_weeks_spanned", "comment": null}, "holiday_date": {"type": "timestamp without time zone", "index": 8, "name": "holiday_date", "comment": null}, "holiday_start_or_end": {"type": "text", "index": 9, "name": "holiday_start_or_end", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_holiday"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "BASE TABLE", "schema": 
"zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "timestamp without time zone", "index": 2, "name": "valid_from", "comment": null}, "valid_until": {"type": "timestamp without time zone", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "bigint", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 5, "name": "end_time_utc", "comment": null}, "change_type": {"type": "text", "index": 6, "name": "change_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__schedule_timezones": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_timezones", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "schedule_id_index": {"type": "bigint", "index": 2, "name": "schedule_id_index", "comment": null}, "time_zone": {"type": "text", "index": 3, "name": "time_zone", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "offset_minutes": {"type": "integer", "index": 5, "name": "offset_minutes", "comment": null}, "start_time_utc": {"type": "bigint", "index": 6, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 7, "name": "end_time_utc", "comment": null}, "schedule_valid_from": {"type": "timestamp without time zone", "index": 8, "name": "schedule_valid_from", "comment": null}, "schedule_valid_until": {"type": "timestamp without time zone", "index": 9, "name": "schedule_valid_until", "comment": null}, "schedule_starting_sunday": {"type": "timestamp without time zone", "index": 10, "name": "schedule_starting_sunday", "comment": null}, "schedule_ending_sunday": {"type": "timestamp without time zone", "index": 11, "name": "schedule_ending_sunday", "comment": null}, "change_type": {"type": "text", "index": 12, "name": "change_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_timezones"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": 
"sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", 
"comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_comment_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "comment_time": {"type": "timestamp without time zone", "index": 3, "name": "comment_time", "comment": null}, "comment_markdown": {"type": "text", "index": 4, "name": "comment_markdown", "comment": null}, "comment_tokens": {"type": "integer", "index": 5, "name": "comment_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_document"}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "comments_group_markdown": {"type": "text", "index": 3, "name": "comments_group_markdown", "comment": null}, "chunk_tokens": {"type": "bigint", "index": 4, "name": "chunk_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped"}, "model.zendesk.int_zendesk__ticket_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_markdown": {"type": "text", "index": 2, "name": "ticket_markdown", "comment": null}, "ticket_tokens": {"type": "integer", "index": 3, "name": "ticket_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_document"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "text", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": 
"timestamp without time zone", "index": 4, "name": "last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "text", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 6, "name": "assignee_stations_count", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "bigint", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "text", "index": 2, "name": "latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "text", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "text", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "text", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "bigint", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "double 
precision", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "text", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "bigint", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": "bigint", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "timestamp without time zone", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "timestamp with time zone", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "text", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "boolean", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, 
"updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}, "user_tags": {"type": "text", "index": 18, "name": "user_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"audit_log_id": {"type": "text", "index": 1, "name": "audit_log_id", "comment": null}, "action": {"type": "text", "index": 2, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 3, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 4, "name": "change_description", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 6, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 7, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 8, "name": "source_type", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 9, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log"}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": 
{"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"brand_id": {"type": "bigint", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "text", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "text", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 4, "name": "subdomain", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "BASE 
TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"daylight_end_utc": {"type": "timestamp without time zone", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "text", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "integer", "index": 6, "name": "daylight_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "text", "index": 2, "name": "domain_name", "comment": null}, "index": {"type": "integer", "index": 3, "name": "index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether 
there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"group_id": {"type": "bigint", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 2, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, "model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": 
"VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, "model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "bigint", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "text", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"_fivetran_deleted": {"type": "boolean", "index": 1, "name": "_fivetran_deleted", "comment": null}, 
"_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "timestamp without time zone", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "text", "index": 4, "name": "holiday_id", "comment": null}, "holiday_name": {"type": "text", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "timestamp without time zone", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": 
{"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "body": {"type": "text", "index": 4, "name": "body", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", 
"comment": null}, "is_public": {"type": "boolean", "index": 6, "name": "is_public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "user_id": {"type": "bigint", "index": 8, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "boolean", "index": 9, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "boolean", "index": 10, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "boolean", "index": 11, "name": "is_voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "text", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "bigint", "index": 6, "name": "user_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", 
"index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "text", "index": 3, "name": "schedule_id", "comment": null}}, 
"stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": 
{"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"standard_offset": {"type": "text", "index": 1, "name": "standard_offset", "comment": null}, "time_zone": {"type": "text", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "integer", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "VIEW", "schema": 
"zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, "model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, 
"user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", 
"value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"document_id": {"type": "text", "index": 1, "name": "document_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "chunk_tokens_approximate": {"type": "bigint", "index": 3, "name": "chunk_tokens_approximate", "comment": null}, "chunk": {"type": "text", "index": 4, "name": "chunk", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__document"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"sla_event_id": {"type": "text", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 4, "name": "metric", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "boolean", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "boolean", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "text", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "text", "index": 5, "name": "assignee_name", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "database": 
"postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": 
{"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, 
"assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_day_id": {"type": "text", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without 
time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": 
"requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, 
"assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "double precision", "index": 84, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "double precision", "index": 85, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 86, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 87, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 88, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 89, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 90, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 91, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 92, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "timestamp without time zone", "index": 93, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 94, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 95, "name": "assignee_stations_count", "comment": null}, "group_stations_count": {"type": "bigint", "index": 96, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "text", "index": 97, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "text", "index": 98, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 99, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 100, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "timestamp without time zone", "index": 101, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "timestamp without time zone", "index": 102, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 103, "name": "first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 104, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 105, "name": "ticket_unassigned_duration_calendar_minutes", 
"comment": null}, "first_resolution_calendar_minutes": {"type": "double precision", "index": 106, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "double precision", "index": 107, "name": "final_resolution_calendar_minutes", "comment": null}, "count_resolutions": {"type": "bigint", "index": 108, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "bigint", "index": 109, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "bigint", "index": 110, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "bigint", "index": 111, "name": "total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "timestamp without time zone", "index": 112, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "double precision", "index": 113, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "double precision", "index": 114, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "double precision", "index": 115, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "double precision", "index": 116, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": "double precision", "index": 117, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "double precision", "index": 118, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "double precision", "index": 119, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "bigint", "index": 120, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "double precision", "index": 121, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "double precision", "index": 122, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "double precision", "index": 123, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "double precision", "index": 124, "name": "unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 125, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 126, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "boolean", "index": 127, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "numeric", "index": 128, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "numeric", "index": 129, "name": "full_resolution_business_minutes", "comment": null}, "first_reply_time_business_minutes": {"type": "numeric", "index": 130, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "numeric", "index": 131, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "numeric", "index": 132, "name": 
"requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "numeric", "index": 133, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "numeric", "index": 134, "name": "agent_work_time_in_business_minutes", "comment": null}, "on_hold_time_in_business_minutes": {"type": "numeric", "index": 135, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "numeric", "index": 136, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "numeric", "index": 137, "name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_count": {"type": "bigint", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "bigint", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "bigint", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "bigint", "index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "bigint", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "bigint", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "bigint", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "bigint", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "bigint", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "bigint", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "bigint", "index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "bigint", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "bigint", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "bigint", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "bigint", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "bigint", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "bigint", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "bigint", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": {"type": "bigint", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "bigint", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "bigint", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": 
false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.audit_log"}, "source.zendesk_source.zendesk.brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": 
"Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", 
"index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": 
"_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, "source.zendesk_source.zendesk.ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": 
"problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": 
"boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": 
"boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, 
"errors": null} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T22:37:33.758709Z", "invocation_id": "85319b70-bc7d-461c-bfee-a3b90f57ada9", "env": {}}, "nodes": {"seed.zendesk_integration_tests.audit_log_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.audit_log_data"}, "seed.zendesk_integration_tests.brand_data_postgres": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, 
"name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, "seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": 
"bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": 
null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", 
"index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": 
"ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": 
{"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": 
null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, 
"sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "bigint", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has 
Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "timestamp without time zone", "index": 2, "name": "last_comment_added_at", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "bigint", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, "model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "text", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics 
for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"valid_from": {"type": "date", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "bigint", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "text", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "text", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": 
"int_zendesk__reply_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "boolean", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "numeric", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "timestamp without time zone", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "integer", "index": 15, "name": "week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "integer", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "numeric", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "timestamp without time zone", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "timestamp without time zone", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "bigint", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "timestamp without time zone", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "timestamp without time zone", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "numeric", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "double precision", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "timestamp with time zone", "index": 20, "name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "timestamp without time zone", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "boolean", "index": 22, "name": "is_stale_sla_policy", "comment": null}, "is_sla_breached": {"type": "boolean", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "double precision", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "timestamp without time zone", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "database": 
"postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "bigint", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": 
null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp with time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, 
"schedule_id_index": {"type": "bigint", "index": 2, "name": "schedule_id_index", "comment": null}, "start_time": {"type": "integer", "index": 3, "name": "start_time", "comment": null}, "end_time": {"type": "integer", "index": 4, "name": "end_time", "comment": null}, "valid_from": {"type": "date", "index": 5, "name": "valid_from", "comment": null}, "valid_until": {"type": "date", "index": 6, "name": "valid_until", "comment": null}, "day_of_week": {"type": "text", "index": 7, "name": "day_of_week", "comment": null}, "day_of_week_number": {"type": "integer", "index": 8, "name": "day_of_week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_history"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "timestamp without time zone", "index": 2, "name": "valid_from", "comment": null}, "valid_until": {"type": "timestamp without time zone", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "bigint", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 5, "name": "end_time_utc", "comment": null}, "change_type": {"type": "text", "index": 6, "name": "change_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "postgres", "comment": null, "owner": 
"pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_comment_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", 
"comment": null}, "comment_time": {"type": "timestamp without time zone", "index": 3, "name": "comment_time", "comment": null}, "comment_markdown": {"type": "text", "index": 4, "name": "comment_markdown", "comment": null}, "comment_tokens": {"type": "integer", "index": 5, "name": "comment_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_document"}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "comments_group_markdown": {"type": "text", "index": 3, "name": "comments_group_markdown", "comment": null}, "chunk_tokens": {"type": "bigint", "index": 4, "name": "chunk_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped"}, "model.zendesk.int_zendesk__ticket_document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_markdown": {"type": "text", "index": 2, "name": "ticket_markdown", "comment": null}, "ticket_tokens": {"type": "integer", "index": 3, "name": "ticket_tokens", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_document"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "text", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 4, "name": "last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "text", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 6, "name": "assignee_stations_count", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "bigint", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "text", "index": 2, "name": "latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "text", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "text", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "text", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "bigint", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "double precision", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "text", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "bigint", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": "bigint", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": 
"int_zendesk__ticket_schedules", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "timestamp without time zone", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "timestamp with time zone", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "text", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "boolean", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, 
"is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}, "user_tags": {"type": "text", "index": 18, "name": "user_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"audit_log_id": {"type": "text", "index": 1, "name": "audit_log_id", "comment": null}, "action": {"type": "text", "index": 2, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 3, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 4, "name": "change_description", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 6, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 7, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 8, "name": "source_type", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 9, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log"}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", "comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"brand_id": {"type": "bigint", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "text", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "text", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 4, "name": "subdomain", "comment": 
null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"daylight_end_utc": {"type": "timestamp without time zone", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "text", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "integer", "index": 6, "name": "daylight_offset_minutes", "comment": 
null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "text", "index": 2, "name": "domain_name", "comment": null}, "index": {"type": "integer", "index": 3, "name": "index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"group_id": {"type": "bigint", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 2, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, 
"model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, 
"model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "bigint", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "text", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"_fivetran_deleted": {"type": "boolean", "index": 1, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "timestamp without time zone", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "text", "index": 4, "name": "holiday_id", "comment": null}, "holiday_name": {"type": "text", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "timestamp without time zone", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", 
"value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": 
{"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "body": {"type": "text", "index": 4, "name": "body", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "is_public": {"type": "boolean", "index": 6, "name": "is_public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "user_id": {"type": "bigint", "index": 8, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "boolean", "index": 9, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "boolean", "index": 10, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "boolean", "index": 11, "name": "is_voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are 
statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "text", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "bigint", "index": 6, "name": "user_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, 
"model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "text", "index": 3, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, 
"_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", 
"comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"standard_offset": {"type": "text", "index": 1, "name": "standard_offset", "comment": null}, "time_zone": {"type": "text", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "integer", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, 
"model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 5, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 8, "name": "email", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 10, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 11, "name": "phone", "comment": null}, "role": {"type": "text", "index": 12, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 13, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 14, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 15, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 16, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 17, "name": "is_suspended", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "VIEW", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": 
"bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__document": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"document_id": {"type": "text", "index": 1, "name": "document_id", "comment": null}, "chunk_index": {"type": "integer", "index": 2, "name": "chunk_index", "comment": null}, "chunk_tokens_approximate": {"type": "bigint", "index": 3, "name": "chunk_tokens_approximate", "comment": null}, "chunk": 
{"type": "text", "index": 4, "name": "chunk", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__document"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"sla_event_id": {"type": "text", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 4, "name": "metric", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "boolean", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "boolean", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "text", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "text", "index": 5, "name": "assignee_name", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp 
without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, 
"name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, 
"assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_day_id": {"type": "text", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 7, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 8, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 9, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 10, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 11, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 12, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 13, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 14, "name": "priority", "comment": 
null}, "recipient": {"type": "text", "index": 15, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 16, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 17, "name": "status", "comment": null}, "subject": {"type": "text", "index": 18, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 19, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 20, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 21, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 22, "name": "type", "comment": null}, "url": {"type": "text", "index": 23, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 24, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 25, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 26, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 27, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 28, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 29, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 30, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 31, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 32, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 33, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 34, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 36, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 37, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 38, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 40, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 41, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 42, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 43, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 45, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 46, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 47, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 48, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 49, "name": "is_requester_active", "comment": 
null}, "requester_locale": {"type": "text", "index": 50, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 51, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 52, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 54, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 55, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 56, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 57, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 58, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 60, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 61, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 62, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 63, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 64, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 65, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 66, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 67, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 68, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 69, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 70, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 71, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 72, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 73, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 74, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 75, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 76, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 78, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 79, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 80, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 81, "name": "requester_tag", "comment": null}, 
"submitter_tag": {"type": "text", "index": 82, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 83, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "double precision", "index": 84, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "double precision", "index": 85, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 86, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 87, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 88, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 89, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 90, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 91, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 92, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "timestamp without time zone", "index": 93, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 94, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 95, "name": "assignee_stations_count", "comment": null}, "group_stations_count": {"type": "bigint", "index": 96, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "text", "index": 97, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "text", "index": 98, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 99, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 100, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "timestamp without time zone", "index": 101, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "timestamp without time zone", "index": 102, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 103, "name": "first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 104, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 105, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}, "first_resolution_calendar_minutes": {"type": "double precision", "index": 106, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "double precision", "index": 107, "name": "final_resolution_calendar_minutes", "comment": null}, "count_resolutions": {"type": "bigint", "index": 108, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "bigint", "index": 109, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "bigint", "index": 110, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "bigint", "index": 111, "name": 
"total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "timestamp without time zone", "index": 112, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "double precision", "index": 113, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "double precision", "index": 114, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "double precision", "index": 115, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "double precision", "index": 116, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": "double precision", "index": 117, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "double precision", "index": 118, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "double precision", "index": 119, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "bigint", "index": 120, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "double precision", "index": 121, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "double precision", "index": 122, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "double precision", "index": 123, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "double precision", "index": 124, "name": "unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 125, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 126, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "boolean", "index": 127, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "numeric", "index": 128, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "numeric", "index": 129, "name": "full_resolution_business_minutes", "comment": null}, "first_reply_time_business_minutes": {"type": "numeric", "index": 130, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "numeric", "index": 131, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "numeric", "index": 132, "name": "requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "numeric", "index": 133, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "numeric", "index": 134, "name": "agent_work_time_in_business_minutes", "comment": null}, "on_hold_time_in_business_minutes": {"type": "numeric", "index": 135, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "numeric", "index": 136, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "numeric", "index": 137, 
"name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_count": {"type": "bigint", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "bigint", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "bigint", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "bigint", "index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "bigint", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "bigint", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "bigint", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "bigint", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "bigint", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "bigint", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "bigint", "index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "bigint", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "bigint", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "bigint", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "bigint", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "bigint", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "bigint", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "bigint", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": {"type": "bigint", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "bigint", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "bigint", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "audit_log_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "integer", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "action": {"type": "text", "index": 3, "name": "action", "comment": null}, "actor_id": {"type": "integer", "index": 4, "name": "actor_id", 
"comment": null}, "change_description": {"type": "text", "index": 5, "name": "change_description", "comment": null}, "created_at": {"type": "text", "index": 6, "name": "created_at", "comment": null}, "source_id": {"type": "integer", "index": 7, "name": "source_id", "comment": null}, "source_label": {"type": "text", "index": 8, "name": "source_label", "comment": null}, "source_type": {"type": "text", "index": 9, "name": "source_type", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.audit_log"}, "source.zendesk_source.zendesk.brand": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": 
{"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp 
without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, "source.zendesk_source.zendesk.ticket_comment": 
{"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, 
"name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", 
"label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, 
"last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zz_zendesk", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, "errors": null} \ No newline at end of file diff --git a/docs/manifest.json b/docs/manifest.json index bdf0d508..7fcd616e 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T16:53:07.142891Z", "invocation_id": "ee1cfc0d-443e-4374-ad8a-25dc360a3746", "env": {}, "project_name": "zendesk_integration_tests", "project_id": "b8a12ac1bacdf035438fc7646299ce11", "user_id": "8268eefe-e8f7-472e-ab2a-a92f0135d76d", "send_anonymous_usage_stats": true, "adapter_type": "postgres"}, "nodes": 
{"seed.zendesk_integration_tests.organization_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.5350242, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "user_id": "bigint", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1728492760.536248, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "schedule_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728492760.538736, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", "unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", 
"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5416782, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "47c9244103b9a8dc25c5ce75693b8389df92258dde23dae71a09f021cf1b7ab7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "user_id": "bigint", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1728492760.544234, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.audit_log_data": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "audit_log_data.csv", "original_file_path": "seeds/audit_log_data.csv", "unique_id": "seed.zendesk_integration_tests.audit_log_data", "fqn": ["zendesk_integration_tests", "audit_log_data"], "alias": "audit_log_data", "checksum": {"name": "sha256", "checksum": "9979d1f37155833b5af3a3de6d9bcca8ac3143b8ecd59e32efca95a1b8e44b10"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' 
else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5455658, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "effe2837ec0ff3ec59fddc7fce0a5f4a6ff0a69daef5ae904244dcbf34425dae"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "bigint", "brand_id": "bigint", "external_id": "bigint", "forum_topic_id": "bigint", "group_id": "bigint", "organization_id": "bigint", "problem_id": "bigint", "requester_id": "bigint", "submitter_id": "bigint", "ticket_form_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728492760.54765, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data_postgres": {"database": "postgres", "schema": "zz_zendesk", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", 
"brand_data_postgres"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": true, "alias": "brand_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1728492760.5490701, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.550294, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": 
"ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "schedule_id": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728492760.551784, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.5531719, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": 
{"name": "sha256", "checksum": "9f600c24b84ed0183e88c5aaa4e7e02bd2228115bebc85217f04c97bd5b6dbc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.554456, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "end_time": "bigint", "start_time": "bigint", "end_time_utc": "bigint", "start_time_utc": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time_utc": "{{ 'int64' if target.type 
== 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728492760.555796, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728492760.556999, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 
'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728492760.55834, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": "a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1728492760.559668, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "postgres", "schema": "zz_zendesk", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' 
if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728492760.561072, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728492760.562429, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about its tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote":
null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [],
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.572698, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n select *\n from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n \n select *\n from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n latest_ticket_form.name as ticket_form_name,\n {% endif %}\n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n {% endif %}\n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n 
requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": 
null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n 
requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": "71977e3eeb4ea80a2beb205ad3dde4fb9aac17cb8391df9c39b854e658d124cb"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": "agent_wait_time_in_business_minutes", "description": "The combined number of business minutes 
the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, 
"assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requester's organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requester's organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate 
whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.5869162, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 
'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n 
ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", 
"model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join 
ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 
1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith 
ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + 
\n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- 
Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where 
floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, 
schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n 
p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as 
solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments 
as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n 
ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join 
ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n 
group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies 
the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard 
time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into 
calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', 
(cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n 
week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_business", "sql": " __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n 
status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as 
week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "sql": " 
__dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n 
select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": 
true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.591616, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_summary\"", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in 
('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n 
else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": "model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "2fea56dd7631d630021a96594da99a1b65affd7ec6d7a5a913ef3fc0b7759949"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492761.5754502, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if 
is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n 
select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || 
coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__sla_policies": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "7f12fd205228c0344bec4ae967a46c692bbede3209008a5648f86be4777550ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional sla breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is a SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achievement event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.57473, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n select *\n from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all \n\n select \n ticket_id,\n 
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n 
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((now())::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > now())\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": "546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, 
"unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.5921931, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_backlog\"", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} 
--Standard ID field where the name can easily be joined from stg model.\n ,ticket_forms.name as ticket_form_name\n\n {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,organizations.name as organization_name\n\n {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,brands.name as brand_name\n\n {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,group_names.name as group_name\n\n {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.locale as local_name\n\n {% else %} --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.{{ col }}\n {% endif %}\n {% endfor %}\n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join ticket_forms\n on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n left join group_names\n on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields are not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join users as requester\n on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join brands\n on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join organizations\n on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", 
"model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "e3fdf31f14e332d08049e6ad3a865a8a8776755ada75ddb655a6cc72a61b9d15"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", 
"columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.790267, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: 
https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If your company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, now()) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": "sha256", "checksum": "430c95ca8321909d770cb8caae56a0bdc90d91b889969ddcdfb4725b1bc5f903"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, 
"column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.796444, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the 
beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", 
"macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as 
ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as 
(\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.819922, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - 
running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "postgres", 
"schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3d9208f477a6aa3dcf000568e9ca35d8edbdc8c7d47223f34bb1f1aa0f609902"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.8250492, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "12c0706c03db8c187b66676360dc7ae36eb9db9b9c36324366854ec9ca03448d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.829491, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 
'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from week_index_calc\n cross join weeks\n 
where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt_date.week_start('sla_applied_at','UTC') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from 
intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n 
sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 
1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 
\n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": 
"6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.846275, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.8504808, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n 
week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is 
null and {{ dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n 
reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.859007, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n 
select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": 
"model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": "int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "5562a77785bebf0f99e2d574f4b762ca5149c3c92245a7e35b345bf3ffb1cb00"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492760.8642151, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n 
\"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", 
"macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - 
date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, 
valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else 
false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3dcdd6a267ee2ec704192d6e14b7af92ba52316f66389455c5bf3d0c73649188"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.873724, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, 
\"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times"], "alias": "int_zendesk__ticket_reply_times", "checksum": {"name": "sha256", "checksum": "6de1b30f99a9bbd078c823538ca0e87c5b57d33160f65c290ecd67765e8d4472"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.878148, "relation_name": null, "raw_code": "with ticket_public_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at 
end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n ({{ dbt.datediff(\n 'end_user_comment_created_at',\n 'agent_responded_at',\n 'second') }} / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n 
end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.int_zendesk__ticket_reply_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times_calendar.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times_calendar"], "alias": "int_zendesk__ticket_reply_times_calendar", "checksum": {"name": "sha256", "checksum": "6fb6a60134019d78fcfc8c135b4a7887b3ce52ec53d8db463194f7824d2c71c2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.881825, "relation_name": null, "raw_code": "with ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_reply_times"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by 
valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join 
ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and 
agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comments_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comments_enriched", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__comments_enriched.sql", "original_file_path": "models/reply_times/int_zendesk__comments_enriched.sql", "unique_id": "model.zendesk.int_zendesk__comments_enriched", "fqn": ["zendesk", "reply_times", "int_zendesk__comments_enriched"], "alias": "int_zendesk__comments_enriched", "checksum": {"name": "sha256", "checksum": "970004a2aa343ae78a3f810828600c7eca8585428b52b05e4353f9debc6f1af5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.885153, "relation_name": null, "raw_code": "with ticket_comment as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'comment'\n\n), users as (\n\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id 
rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__comments_enriched.sql", "compiled": true, "compiled_code": "with ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_reply_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_reply_time_business", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_first_reply_time_business.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_first_reply_time_business"], "alias": "int_zendesk__ticket_first_reply_time_business", "checksum": {"name": "sha256", "checksum": "0bacc5f74a5eac2a55c2b0bacb1a0b7908783948ad162b84c230be9310dd02b5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, 
"on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.8865888, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n), ticket_schedules as (\n\n select \n *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from 
weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), 
end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * 
(7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 
'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_enriched", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_enriched.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_enriched.sql", "unique_id": "model.zendesk.int_zendesk__field_history_enriched", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_enriched"], "alias": "int_zendesk__field_history_enriched", "checksum": {"name": "sha256", "checksum": "cdf920b1df5fee8c6a08b0e26996028d327964903e8acc4dd15498d23c00005c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.893893, "relation_name": null, "raw_code": "with ticket_field_history as (\n\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), updater_info as (\n select *\n from {{ ref('int_zendesk__updater_information') }}\n\n), final as (\n select\n ticket_field_history.*\n\n {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.\n {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.\n\n --The below statements are needed to populate Zendesk automated fields for when the zendesk triggers automatically change fields based on user defined triggers.\n {% if col in ['updater_is_active'] %}\n ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}\n\n {% elif col in ['updater_user_id','updater_organization_id'] %}\n ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}\n \n {% elif col in ['updater_last_login_at'] %}\n ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}\n \n {% else %}\n ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}\n \n {% endif %}\n {% endfor %}\n {% endif %} \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "int_zendesk__updater_information", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk.int_zendesk__updater_information"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_enriched.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n 
select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_pivot": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_pivot.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_pivot.sql", "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_pivot"], "alias": "int_zendesk__field_history_pivot", "checksum": {"name": "sha256", "checksum": "077bf8d76ba0523c2ebb987be0fd0746acbdae8fdbdd39fc7a03203a5d070f87"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492760.899085, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"", "raw_code": "-- depends_on: {{ source('zendesk', 'ticket_field_history') }}\n\n{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{% if execute -%}\n {% set results = run_query('select distinct field_name from ' ~ source('zendesk', 'ticket_field_history') ) %}\n {% set results_list = results.columns[0].values() %}\n{% endif -%}\n\nwith field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n ,\n {{ var('ticket_field_history_updater_columns') | join (\", \")}}\n\n {% endif %}\n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from {{ ref('int_zendesk__field_history_enriched') }}\n {% if is_incremental() %}\n where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day\n\n {% for col in results_list if col in var('ticket_field_history_columns') %}\n {% set col_xf = col|lower %}\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n\n {% for upd in var('ticket_field_history_updater_columns') %}\n\n {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.\n\n {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}\n\n ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% else %}\n\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% endif %}\n {% endfor %}\n {% endif %}\n {% endfor %}\n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n {{ dbt_utils.generate_surrogate_key(['ticket_id','date_day'])}} as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_enriched", "package": null, "version": null}], "sources": [["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.date_trunc", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history", "model.zendesk.int_zendesk__field_history_enriched"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_pivot.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n 
from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as 
updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__field_history_enriched", "sql": " __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updater_information": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updater_information", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__updater_information.sql", "original_file_path": "models/ticket_history/int_zendesk__updater_information.sql", "unique_id": "model.zendesk.int_zendesk__updater_information", "fqn": ["zendesk", "ticket_history", "int_zendesk__updater_information"], "alias": "int_zendesk__updater_information", "checksum": {"name": "sha256", "checksum": "62a690646cff991c0e0b6e205440a070bb44aab8d4d9286714710c52a4c6677a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.908752, "relation_name": null, "raw_code": "with users as (\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), organizations as (\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,users.user_tags as updater_user_tags\n {% endif %}\n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,organizations.domain_names as updater_organization_domain_names\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,organizations.organization_tags as 
updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492760.913312, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from 
set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "01739353b5d9fec39fe39ca428ceb43b51a64bd7408d1f4825fcf1d598fb15ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728492760.950457, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as (\n\n 
select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then now() else updated_at end) as date) as open_until\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), 
'_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.957124, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as 
on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "9ea4023c98c8bdebaf01445490e058d4766cb32a45db569e01e91fa8eac2e689"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.958596, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), 
ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time 
<= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n 
ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', 
(ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n 
and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "722fbe199f8263916801adf6a6f035c8dc37de056bbd359bd9c42f834b3f3ef3"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.966472, "relation_name": null, "raw_code": "-- depends_on: {{ var('ticket') }}\nwith spine as (\n\n {% if execute and flags.WHICH in ('run', 'build') %}\n\n {%- set first_date_query %}\n select \n coalesce(\n min(cast(created_at as date)), \n cast({{ dbt.dateadd(\"month\", -1, 
\"current_date\") }} as date)\n ) as min_date\n from {{ var('ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ dbt.dateadd('year', \n - var('ticket_field_history_timeframe_years', 50), \"current_date\") }}\n {% endset -%}\n\n {%- set first_date = dbt_utils.get_single_value(first_date_query) %}\n\n {% else %}\n {%- set first_date = '2016-01-01' %}\n\n {% endif %}\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = \"cast('\" ~ first_date ~ \"' as date)\",\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__timezone_daylight": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__timezone_daylight", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__timezone_daylight.sql", "original_file_path": "models/utils/int_zendesk__timezone_daylight.sql", "unique_id": "model.zendesk.int_zendesk__timezone_daylight", "fqn": ["zendesk", "utils", "int_zendesk__timezone_daylight"], "alias": "int_zendesk__timezone_daylight", "checksum": {"name": "sha256", "checksum": 
"021f733ee1abac848fb9d6cfff1c4981f24919f7ff0f59e9c2895654831d9dd8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.9816241, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. 
Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp() }} as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(valid_until as {{ dbt.type_timestamp() }}) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.dateadd", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__timezone_daylight.sql", "compiled": true, "compiled_code": "\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], "alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728492760.9876359, "relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is 
not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - 
date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "92b30d97de3fa5a059b70ef930d731bc7cfeb93a39206970f37ed605264c01af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492760.993813, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n 
from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', 
from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
(ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + 
date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < 
cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
(ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": "sha256", "checksum": "c14c73bcfcc33dc8bc6a94827770c47f4e70f4608f3227bbbc1f10cbcad4c572"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728492761.003202, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id 
= ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), 
ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time 
as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n 
from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n 
ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/zendesk__document.sql", "original_file_path": "models/unstructured/zendesk__document.sql", "unique_id": "model.zendesk.zendesk__document", "fqn": ["zendesk", "unstructured", "zendesk__document"], "alias": "zendesk__document", "checksum": {"name": "sha256", "checksum": "0d3d8f2e10bcc679a958386cd5b13f616e17139821263f12c8dddef34c93b21b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": 
false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"document_id": {"name": "document_id", "description": "Equivalent to `ticket_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_index": {"name": "chunk_index", "description": "The index of the chunk associated with the `document_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_tokens_approximate": {"name": "chunk_tokens_approximate", "description": "Approximate number of tokens for the chunk, assuming 4 characters per token.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk": {"name": "chunk", "description": "The text of the chunk.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/unstructured/zendesk_unstructured.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.674829, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_document as (\n select *\n from {{ ref('int_zendesk__ticket_document') }}\n\n), grouped as (\n select *\n from {{ ref('int_zendesk__ticket_comment_documents_grouped') }}\n\n), final as (\n select\n cast(ticket_document.ticket_id as {{ dbt.type_string() }}) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n {{ dbt.concat([\n \"ticket_document.ticket_markdown\",\n \"'\\\\n\\\\n## COMMENTS\\\\n\\\\n'\",\n \"grouped.comments_group_markdown\"]) }}\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__ticket_document", "package": null, "version": null}, {"name": "int_zendesk__ticket_comment_documents_grouped", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.concat"], "nodes": ["model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_comment_documents_grouped"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/zendesk__document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_document as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"\n\n), grouped as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"\n\n), final as (\n select\n cast(ticket_document.ticket_id as TEXT) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n ticket_document.ticket_markdown || '\\n\\n## COMMENTS\\n\\n' || grouped.comments_group_markdown\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": 
{"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_documents_grouped"], "alias": "int_zendesk__ticket_comment_documents_grouped", "checksum": {"name": "sha256", "checksum": "ad03266e19d20396ca75812cb98816f3e11e078c63c30807790903674f4db42b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.014288, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith filtered_comment_documents as (\n select *\n from {{ ref('int_zendesk__ticket_comment_document') }}\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast({{ dbt_utils.safe_divide('floor(cumulative_length - 1)', var('zendesk_max_tokens', 5000)) }} as {{ dbt.type_int() }}) as chunk_index,\n {{ dbt.listagg(\n measure=\"comment_markdown\",\n delimiter_text=\"'\\\\n\\\\n---\\\\n\\\\n'\",\n order_by_clause=\"order by comment_time\"\n ) }} as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_comment_document", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.safe_divide", "macro.dbt.type_int", "macro.dbt.listagg"], "nodes": ["model.zendesk.int_zendesk__ticket_comment_document"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "compiled": true, "compiled_code": "\n\nwith filtered_comment_documents as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast(\n ( 
floor(cumulative_length - 1) ) / nullif( ( 5000 ), 0)\n as integer) as chunk_index,\n \n string_agg(\n comment_markdown,\n '\\n\\n---\\n\\n'\n order by comment_time\n ) as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_document"], "alias": "int_zendesk__ticket_comment_document", "checksum": {"name": "sha256", "checksum": "e75f893dec0ca7599db16793ad9b39bf5d33f463abe6fa4d7be8019e095f45d8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.022666, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_comments as (\n select *\n from {{ var('ticket_comment') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_email,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n {{ dbt.concat([\n \"'### message from '\", \"commenter_name\", \"' ('\", \"commenter_email\", \"')\\\\n'\",\n \"'##### sent @ '\", \"comment_time\", \"'\\\\n'\",\n \"comment_body\"\n ]) }} as {{ dbt.type_string() }})\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n {{ zendesk.count_tokens(\"comment_markdown\") }} as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case 
when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then left(comment_markdown, {{ var('zendesk_max_tokens', 5000) }} * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then {{ var('zendesk_max_tokens', 5000) }}\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_comments as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_email,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n '### message from ' || commenter_name || ' (' || commenter_email || ')\\n' || '##### sent @ ' || comment_time || '\\n' || comment_body as TEXT)\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n \n \n\n length(\n comment_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > 5000 then left(comment_markdown, 5000 * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > 5000 then 5000\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_document"], "alias": "int_zendesk__ticket_document", "checksum": {"name": "sha256", "checksum": 
"1fd6807d45c4904ff1ecbc4b929c675ae0b766b40a711641af85cfe4c6cae4ec"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.0344992, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith tickets as (\n select *\n from {{ var('ticket') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as user_name,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as created_by,\n tickets.created_at AS created_on,\n {{ zendesk.coalesce_cast([\"tickets.status\", \"'UNKNOWN'\"], dbt.type_string()) }} as status,\n {{ zendesk.coalesce_cast([\"tickets.priority\", \"'UNKNOWN'\"], dbt.type_string()) }} as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n {{ dbt.concat([\n \"'# Ticket : '\", \"ticket_name\", \"'\\\\n\\\\n'\",\n \"'Created By : '\", \"user_name\", \"' ('\", \"created_by\", \"')\\\\n'\",\n \"'Created On : '\", \"created_on\", \"'\\\\n'\",\n \"'Status : '\", \"status\", \"'\\\\n'\",\n \"'Priority : '\", \"priority\"\n ]) }} as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n {{ zendesk.count_tokens(\"ticket_markdown\") }} as ticket_tokens\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_document.sql", "compiled": true, "compiled_code": "\n\nwith tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as user_name,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as created_by,\n tickets.created_at AS created_on,\n \n coalesce(\n cast(tickets.status as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as status,\n \n coalesce(\n 
cast(tickets.priority as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n '# Ticket : ' || ticket_name || '\\n\\n' || 'Created By : ' || user_name || ' (' || created_by || ')\\n' || 'Created On : ' || created_on || '\\n' || 'Status : ' || status || '\\n' || 'Priority : ' || priority as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n \n \n\n length(\n ticket_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as ticket_tokens\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", "unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.0417101, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", 
"package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.045344, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order 
by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - 
(coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", "checksum": {"name": "sha256", "checksum": "c3d207d8a59844953cd5d01532d3e023d7441025158cc2385fc3fa1441e34c13"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.050724, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO 
DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, now()))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, now()))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, now()))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__user_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.054941, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if 
var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "ee9430d1e865b3c9ac4ac930a94c7a8493dea64ff945cc34403076de4506e5e0"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", 
"materialized": "table", "enabled": true}, "created_at": 1728492761.060362, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n the `valid_until` field. In contrast, schedule history and timezone `valid_until` values mark the end of \n validity at the start of the specified day.\n*/\n\nwith schedule_timezones as (\n select *\n from {{ ref('int_zendesk__schedule_timezones') }} \n\n{% if var('using_holidays', True) %}\n), schedule_holidays as (\n select *\n from {{ ref('int_zendesk__schedule_holiday') }} \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n select \n schedule_timezones.schedule_id,\n schedule_timezones.time_zone,\n schedule_timezones.offset_minutes,\n schedule_timezones.start_time_utc,\n schedule_timezones.end_time_utc,\n schedule_timezones.schedule_name,\n schedule_timezones.schedule_valid_from,\n schedule_timezones.schedule_valid_until,\n schedule_timezones.schedule_starting_sunday,\n schedule_timezones.schedule_ending_sunday,\n schedule_timezones.change_type,\n schedule_holidays.holiday_date,\n schedule_holidays.holiday_name,\n schedule_holidays.holiday_valid_from,\n schedule_holidays.holiday_valid_until,\n schedule_holidays.holiday_starting_sunday,\n schedule_holidays.holiday_ending_sunday,\n schedule_holidays.holiday_start_or_end\n from schedule_timezones\n left join schedule_holidays\n on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n select\n join_holidays.*,\n row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n from join_holidays\n\n-- Label the partition start and add a row for to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n case when valid_from_index = 1 and holiday_start_or_end is not null\n then 'partition_start'\n else holiday_start_or_end\n end as holiday_start_or_end,\n valid_from_index,\n max_valid_from_index\n from valid_from_partition\n \n union all\n\n -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n 
end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n 'partition_end' as holiday_start_or_end,\n max_valid_from_index + 1 as valid_from_index,\n max_valid_from_index\n from valid_from_partition\n where max_valid_from_index > 1\n and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_from', 'minute') }}\n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }}\n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as {{ 
dbt.type_string() }}) \n            end as holiday_name,\n        count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n    from valid_minutes\n\n-- Filter out records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n    select \n        *,\n        cast(1 as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week = 1\n\n    union all\n\n    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n    select \n        distinct *,\n        cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n            as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week > 1\n\n), final as(\n    select \n        schedule_id,\n        valid_from,\n        valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from filter_holidays\n\n    -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n    -- of distinct schedule records with the same start_time_utc and end_time_utc.\n    -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n    -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n    where number_holidays_in_week = number_records_for_schedule_start_end\n        and holiday_name is null\n\n{% else %} \n), final as(\n    select \n        schedule_id,\n        schedule_valid_from as valid_from,\n        schedule_valid_until as valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from schedule_timezones\n{% endif %} \n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__schedule_timezones", "package": null, "version": null}, {"name": "int_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.type_string", "macro.dbt.type_int"], "nodes": ["model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n    This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n    accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n    and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n    !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n    the `valid_until` field. 
In contrast, schedule history and timezone `valid_until` values mark the end of \n    validity at the start of the specified day.\n*/\n\nwith schedule_timezones as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_timezones\" \n\n\n), schedule_holidays as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_holiday\" \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n    select \n        schedule_timezones.schedule_id,\n        schedule_timezones.time_zone,\n        schedule_timezones.offset_minutes,\n        schedule_timezones.start_time_utc,\n        schedule_timezones.end_time_utc,\n        schedule_timezones.schedule_name,\n        schedule_timezones.schedule_valid_from,\n        schedule_timezones.schedule_valid_until,\n        schedule_timezones.schedule_starting_sunday,\n        schedule_timezones.schedule_ending_sunday,\n        schedule_timezones.change_type,\n        schedule_holidays.holiday_date,\n        schedule_holidays.holiday_name,\n        schedule_holidays.holiday_valid_from,\n        schedule_holidays.holiday_valid_until,\n        schedule_holidays.holiday_starting_sunday,\n        schedule_holidays.holiday_ending_sunday,\n        schedule_holidays.holiday_start_or_end\n    from schedule_timezones\n    left join schedule_holidays\n        on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n        and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n        and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n    select\n        join_holidays.*,\n        row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n        count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n    from join_holidays\n\n-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n    select\n        schedule_id,\n        time_zone,\n        offset_minutes,\n        start_time_utc,\n        end_time_utc,\n        schedule_name,\n        schedule_valid_from,\n        schedule_valid_until,\n        schedule_starting_sunday,\n        schedule_ending_sunday,\n        change_type,\n        holiday_name,\n        holiday_date,\n        holiday_valid_from,\n        holiday_valid_until,\n        holiday_starting_sunday,\n        holiday_ending_sunday,\n        case when valid_from_index = 1 and holiday_start_or_end is not null\n            then 'partition_start'\n            else holiday_start_or_end\n            end as holiday_start_or_end,\n        valid_from_index,\n        max_valid_from_index\n    from valid_from_partition\n    \n    union all\n\n    -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n    select\n        schedule_id,\n        time_zone,\n        offset_minutes,\n        start_time_utc,\n        end_time_utc,\n        schedule_name,\n        schedule_valid_from,\n        schedule_valid_until,\n        schedule_starting_sunday,\n        schedule_ending_sunday,\n        change_type,\n        holiday_name,\n        holiday_date,\n        holiday_valid_from,\n        holiday_valid_until,\n        holiday_starting_sunday,\n        holiday_ending_sunday,\n        'partition_end' as holiday_start_or_end,\n        max_valid_from_index + 1 as valid_from_index,\n        max_valid_from_index\n    from valid_from_partition\n    where max_valid_from_index > 1\n        and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or 
holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_from)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_from)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_from)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_until)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_until)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_until)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as TEXT) \n end as holiday_name,\n count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n from valid_minutes\n\n-- Filter out 
records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n    select \n        *,\n        cast(1 as integer) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week = 1\n\n    union all\n\n    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n    select \n        distinct *,\n        cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n            as integer) as number_records_for_schedule_start_end\n    from find_holidays\n    where number_holidays_in_week > 1\n\n), final as(\n    select \n        schedule_id,\n        valid_from,\n        valid_until,\n        start_time_utc,\n        end_time_utc,\n        change_type\n    from filter_holidays\n\n    -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n    -- of distinct schedule records with the same start_time_utc and end_time_utc.\n    -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n    -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n    where number_holidays_in_week = number_records_for_schedule_start_end\n        and holiday_name is null\n\n    \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "30511daddcbbf831fc42f7e5039fad1c76a43499f3c208e1b982ab895dfa7d44"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.065963, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n  \n  select *\n  from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n \n  select *\n  from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n \n  select *\n  from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business 
hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= 
ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n now() + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.073171, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", "original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.074645, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as 
is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 
'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_timezones": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_timezones", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_timezones.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_timezones.sql", "unique_id": "model.zendesk.int_zendesk__schedule_timezones", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_timezones"], "alias": "int_zendesk__schedule_timezones", "checksum": {"name": "sha256", "checksum": "b381e2d09c8d831ca04da433891079f92b9e08f9a932575f32c12c73fa3df3b8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.075899, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_timezones\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith split_timezones as (\n select *\n from {{ ref('int_zendesk__timezone_daylight') }} \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from {{ var('schedule') }} \n\n{% if var('using_schedule_histories', True) %}\n), schedule_history as (\n select *\n from {{ 
ref('int_zendesk__schedule_history') }} \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast({{ dbt.current_timestamp() }} as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments 
only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n {{ dbt_utils.group_by(6) }}\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n{% else %} -- when not using schedule histories\n), final_schedule as (\n select \n schedule.schedule_id,\n 0 as schedule_id_index,\n lower(schedule.time_zone) as time_zone,\n schedule.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from schedule\n left join split_timezones\n on split_timezones.time_zone = lower(schedule.time_zone)\n{% endif %}\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() 
}}) as schedule_starting_sunday,\n        cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday,\n        -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n        case when schedule_valid_from = timezone_valid_from\n            then 'timezone'\n            else 'schedule'\n            end as change_type\n    from final_schedule\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__timezone_daylight", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "int_zendesk__schedule_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt.date_trunc", "macro.dbt_date.week_start"], "nodes": ["model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk.int_zendesk__schedule_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_timezones.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n    select \n        timezone.*,\n        daylight_time.daylight_start_utc,\n        daylight_time.daylight_end_utc,\n        daylight_time.daylight_offset_minutes\n\n    from timezone \n    left join daylight_time \n        on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n    select \n        *,\n        -- will be null for timezones without any daylight savings records (and the first entry)\n        -- we will coalesce the first entry date with 1970-01-01
\n        lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n        -- will be null for timezones without any daylight savings records (and the last entry)\n        -- we will coalesce the last entry date with the current date \n        lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n    from timezone_with_dt\n\n), split_timezones as (\n\n    -- standard (includes timezones without DT)\n    -- starts: when the last Daylight Savings ended\n    -- ends: when the next Daylight Savings starts\n    select \n        time_zone,\n        standard_offset_minutes as offset_minutes,\n\n        -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n        coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n        -- daylight_start_utc is null for timezones that don't use DT\n        coalesce(daylight_start_utc, cast( \n\n    now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n    from order_timezone_dt\n\n    union all \n\n    -- DT (excludes timezones without it)\n    -- starts: when this Daylight Savings started\n    -- ends: when this Daylight Savings ends\n    select \n        time_zone,\n        -- Pacific Time is -8h during standard time and -7h during DT\n        standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n        daylight_start_utc as valid_from,\n        daylight_end_utc as valid_until\n\n    from order_timezone_dt\n    where daylight_offset_minutes is not null\n\n    union all\n\n    select\n        time_zone,\n        standard_offset_minutes as offset_minutes,\n\n        -- Get the latest daylight_end_utc time and set that as the valid_from\n        max(daylight_end_utc) as valid_from,\n\n        -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n        cast( \n\n    now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n    from order_timezone_dt\n    group by 1, 2\n    -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n    having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n    select\n        lower(time_zone) as time_zone,\n        offset_minutes,\n        cast(valid_from as timestamp) as valid_from,\n        cast(valid_until as timestamp) as valid_until\n    from split_timezones\n)\n\nselect * \nfrom final\n), split_timezones as (\n    select *\n    from __dbt__cte__int_zendesk__timezone_daylight \n\n), schedule as (\n    select \n        *,\n        max(created_at) over (partition by schedule_id) as max_created_at\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n\n), schedule_history as (\n    select *\n    from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\" \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n    select\n        distinct schedule_id,\n        lower(time_zone) as time_zone,\n        schedule_name\n    from schedule\n    where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. 
Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast(now() as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as timestamp) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n over (partition by schedule_id, 
start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n group by 1,2,3,4,5,6\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as timestamp) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as timestamp) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast(date_trunc('day', split_timezones.valid_from) as timestamp) as timezone_valid_from,\n cast(date_trunc('day', split_timezones.valid_until) as timestamp) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
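The case-when ladder that follows enumerates the three overlap situations by hand. For reference, the intersection of two half-open windows [from, until) can equivalently be written with greatest()/least() plus an overlap filter. A minimal standalone sketch (the CTE names and dates are illustrative only, not from the package):

```sql
-- Hypothetical windows: a schedule valid through 2023, and a daylight-time
-- offset valid March through November. The intersection is the slice of the
-- schedule that should carry the daylight offset.
with schedule_window as (
    select date '2023-01-01' as schedule_valid_from,
           date '2023-12-01' as schedule_valid_until
), timezone_window as (
    select date '2023-03-12' as timezone_valid_from,
           date '2023-11-05' as timezone_valid_until
)
select
    greatest(schedule_valid_from, timezone_valid_from) as valid_from,  -- 2023-03-12
    least(schedule_valid_until, timezone_valid_until) as valid_until   -- 2023-11-05
from schedule_window
cross join timezone_window
-- keep only window pairs that actually overlap
where schedule_valid_from < timezone_valid_until
  and timezone_valid_from < schedule_valid_until;
```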
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_from + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_until + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__timezone_daylight", "sql": " __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select 
timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.085171, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
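The running-sum grouping in `find_actual_changes` above is the standard gaps-and-islands technique: flag a row whenever it does not butt up against its predecessor, then turn the running sum of flags into a group id. A self-contained sketch with made-up rows (the inline table and dates are illustrative only):

```sql
with versions (schedule_id, valid_from, valid_until) as (
    values
        (1, date '2023-01-01', date '2023-02-01'),
        (1, date '2023-02-01', date '2023-03-01'),  -- adjacent to the prior row
        (1, date '2023-06-01', date '2023-07-01')   -- gap, so a new island
), flagged as (
    select
        *,
        -- 1 starts a new island; 0 continues the current one
        case when lag(valid_until) over (partition by schedule_id order by valid_from) = valid_from
             then 0 else 1 end as is_new_group
    from versions
), grouped as (
    select
        *,
        sum(is_new_group) over (partition by schedule_id order by valid_from
            rows between unbounded preceding and current row) as group_id
    from flagged
)
select
    schedule_id,
    group_id,
    min(valid_from) as valid_from,   -- consolidated, as in consolidate_changes above
    max(valid_until) as valid_until
from grouped
group by 1, 2;
-- two rows: 2023-01-01..2023-03-01 merged, 2023-06-01..2023-07-01 on its own
```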
"model.zendesk.int_zendesk__schedule_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_history.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_history.sql", "unique_id": "model.zendesk.int_zendesk__schedule_history", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_history"], "alias": "int_zendesk__schedule_history", "checksum": {"name": "sha256", "checksum": "fa0eb9cea317033ef318536affc3f6a42cd178d1b0959d6341e2dbbdceed5ae0"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.0866601, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nwith audit_logs as (\n select\n cast(source_id as {{ dbt.type_string() }}) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from {{ var('audit_log') }}\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 1) }} as schedule_change_from,\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 2) }} as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- 
multiple changes can occur on one day, so we will keep only the latest change in a day.\n), consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %}\n {% for day, day_number in days_of_week.items() %}\n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n '{{ day }}' as day_of_week,\n cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number,\n {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n {% if not loop.last %}union all{% endif %}\n {% endfor %}\n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n{% if target.type == 'redshift' %}\n-- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE\n), redshift_parse_schedule as (\n -- Redshift requires another CTE for unnesting \n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n day_of_week_schedule,\n json_parse('[' || replace(replace(day_of_week_schedule, ', ', ','), ',', '},{') || ']') as json_schedule\n\n from split_days\n where day_of_week_schedule != '{}' -- exclude when the day_of_week_schedule is empty. 
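The `zendesk.regex_extract` macro in the loop above compiles, on Postgres, to a `regexp_matches` call that pulls the first `{...}` group following the day's name (the compiled form appears later in this entry). A standalone sketch with an invented cleaned change string:

```sql
-- Illustrative input only: a cleaned change_description fragment.
-- The non-greedy pattern finds 'tue' and captures the first {...} after it.
select (regexp_matches(
    '"mon":{"09:00":"17:00"}, "tue":{"10:00":"18:00"}',
    '.*?tue.*?({.*?})'
))[1] as tue_schedule;
-- returns {"10:00":"18:00"}
```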
\n\n), unnested_schedules as (\n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n -- go back to strings\n cast(day_of_week_schedule as {{ dbt.type_string() }}) as day_of_week_schedule,\n {{ clean_schedule('JSON_SERIALIZE(unnested_schedule)') }} as cleaned_unnested_schedule\n \n from redshift_parse_schedule as schedules, schedules.json_schedule as unnested_schedule\n\n{% else %}\n), unnested_schedules as (\n select\n split_days.*,\n\n {%- if target.type == 'bigquery' %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule\n\n {%- elif target.type == 'snowflake' %}\n unnested_schedule.key || ':' || unnested_schedule.value as cleaned_unnested_schedule\n from split_days\n cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\\}\\}', '\\}'), '\\{\\{', '\\{'))) as unnested_schedule\n\n {%- elif target.type == 'postgres' %}\n {{ clean_schedule('unnested_schedule::text') }} as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n {%- elif target.type in ('databricks', 'spark') %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule\n\n {% else %}\n cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule\n from split_days\n {%- endif %}\n\n{% endif %}\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
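On Postgres, the flattening branch above rewrites a day's object so each start/stop pair becomes its own array element before unnesting. A standalone sketch (the input literal is invented):

```sql
-- A day with two windows. Replacing ',' with '},{' and wrapping in [ ]
-- turns the single object into an array of one-pair objects that
-- jsonb_array_elements can explode into separate rows.
with split_days (day_of_week_schedule) as (
    values ('{"09:00":"12:00","13:00":"17:00"}')
)
select unnested_schedule
from split_days
cross join lateral jsonb_array_elements(
    ('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb
) as unnested_schedule;
-- row 1: {"09:00": "12:00"}
-- row 2: {"13:00": "17:00"}
```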
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt.type_string", "macro.dbt.split_part", "macro.dbt.type_int", "macro.zendesk.regex_extract", "macro.zendesk.clean_schedule"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_history.sql", "compiled": true, "compiled_code": "\n\nwith audit_logs as (\n select\n cast(source_id as TEXT) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 1\n )\n\n\n \n\n as schedule_change_from,\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 2\n )\n\n\n \n\n as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- multiple changes can occur on one day, so we will keep only the 
latest change in a day.\n), consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n \n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sun' as day_of_week,\n cast('0' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sun.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'mon' as day_of_week,\n cast('1' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?mon.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'tue' as day_of_week,\n cast('2' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?tue.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'wed' as day_of_week,\n cast('3' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?wed.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. 
\n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'thu' as day_of_week,\n cast('4' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?thu.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'fri' as day_of_week,\n cast('5' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?fri.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sat' as day_of_week,\n cast('6' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sat.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will need to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n \n \n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n\n), unnested_schedules as (\n select\n split_days.*,\n replace(replace(replace(replace(cast(unnested_schedule::text as TEXT), '{', ''), '}', ''), '\"', ''), ' ', '') as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
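The `split_times` and `calculate_start_end_times` steps that follow encode each window as minutes elapsed since Sunday 00:00, so a Tuesday 09:00-17:00 window becomes 3420-3900. A standalone sketch of that arithmetic (the inline values are invented):

```sql
with unnested_schedules (cleaned_unnested_schedule, day_of_week_number) as (
    values ('09:00:17:00', 2)  -- tuesday, with sunday = 0
)
select
    cast(split_part(cleaned_unnested_schedule, ':', 1) as integer) * 60
        + cast(split_part(cleaned_unnested_schedule, ':', 2) as integer)
        + 24 * 60 * day_of_week_number as start_time,  -- 540 + 2880 = 3420
    cast(split_part(cleaned_unnested_schedule, ':', 3) as integer) * 60
        + cast(split_part(cleaned_unnested_schedule, ':', 4) as integer)
        + 24 * 60 * day_of_week_number as end_time     -- 1020 + 2880 = 3900
from unnested_schedules;
```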
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 1\n )\n\n\n \n\n, ' ') as integer) as start_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 2\n )\n\n\n \n\n, ' ') as integer) as start_time_mm, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 3\n )\n\n\n \n\n, ' ') as integer) as end_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 4\n )\n\n\n \n\n, ' ') as integer) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_holiday.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk.int_zendesk__schedule_holiday", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_holiday"], "alias": "int_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "6c29a7b0c63792193aff20d849a140f105431fc73033c4db32da15d7cfaad005"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.103731, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_holiday\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_holidays'])) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. 
End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from {{ var('schedule') }} \n\n), schedule_holiday as (\n select *\n from {{ var('schedule_holiday') }} \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from,\n cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until,\n cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday,\n cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. First step is to find those holidays.\n {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as 
holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "language": "sql", "refs": [{"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt.date_trunc", "macro.dbt.type_timestamp", "macro.dbt_date.week_start", "macro.dbt.dateadd", "macro.dbt.datediff", "macro.dbt.type_int", "macro.dbt_utils.generate_series"], "nodes": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n), schedule_holiday as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast(date_trunc('day', holiday_start_date_at) as timestamp) as holiday_valid_from,\n cast(date_trunc('day', holiday_end_date_at) as timestamp) as holiday_valid_until,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n \n\n holiday_end_date_at + ((interval '1 week') * (1))\n\n + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. 
First step is to find those holidays.\n \n (\n ((holiday_end_date_at)::date - (holiday_start_date_at)::date)\n / 7 + case\n when date_part('dow', (holiday_start_date_at)::timestamp) <= date_part('dow', (holiday_end_date_at)::timestamp) then\n case when holiday_start_date_at <= holiday_end_date_at then 0 else -1 end\n else\n case when holiday_start_date_at <= holiday_end_date_at then 1 else 0 end\n end)\n + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as integer) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join (\n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n \n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n + ((interval '1 day') * (-1))\n\n as timestamp) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * 
(holiday_week_number * 7))\n\n as timestamp)\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.113943, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.1155572, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows 
unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n 
where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728492761.1169941, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], 
"meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.12063, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", 
"original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728492761.124181, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": 
["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", "model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": "hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "36fcc85263683c498c3e819ae5ca2dfac8f8dcdd9bef0c19497a6aed3b8d92e2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 
1728492761.1966121, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", \"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6920571, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add 
more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.696689, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.6978319, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"", "raw_code": "with base as (\n\n 
select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "7e546e0327511ba1db938c68a962b4892fe3462d4ffe23baf84aa1c88e4db9c1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 
1728492761.700835, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"", "raw_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_holidays'])) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": 
"8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.69906, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], 
"quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.690371, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.699759, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group": {"database": 
"postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.689462, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "ffc2c4310aafe6b90a26e22cdab400e6d4c750faab7ea4d7519b2cf9105d3f16"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, 
"access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.691776, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, 
"access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6956532, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6953359, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", 
"checksum": {"name": "sha256", "checksum": "7227f84c3600cc310217efae6695bc0f6aea11b2392f5709a54d444a772a9d2c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. 
The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.69407, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n 
select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__brand.sql", "original_file_path": "models/stg_zendesk__brand.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand", "fqn": ["zendesk_source", "stg_zendesk__brand"], "alias": "stg_zendesk__brand", "checksum": {"name": "sha256", "checksum": "106699200d371f2fac9fe94ce084a357331b215d4130195e1e94d2d07c6d169c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, 
"tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Brands are your customer-facing identities. They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"brand_id": {"name": "brand_id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.688405, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__brand_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),\n staging_columns=get_brand_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__brand_tmp", "package": null, "version": null}, {"name": "stg_zendesk__brand_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_brand_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__brand_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__brand.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_form_history.sql", "original_file_path": "models/stg_zendesk__ticket_form_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_form_history"], "alias": "stg_zendesk__ticket_form_history", "checksum": {"name": "sha256", "checksum": "1e70e9a0b2dfce82e649a8a0507d59d6f3f2832429191ea67988ba0dfd1017cf"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"ticket_form_id": {"name": "ticket_form_id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, 
"tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6962502, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_form_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_form_history_tmp')),\n staging_columns=get_ticket_form_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_form_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_form_history.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__audit_log.sql", "original_file_path": "models/stg_zendesk__audit_log.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log", "fqn": ["zendesk_source", "stg_zendesk__audit_log"], "alias": "stg_zendesk__audit_log", "checksum": {"name": "sha256", "checksum": "590bb4a276a0927000ab959d9acc6545321c095a21f49bbae95c56934ba22b39"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"audit_log_id": {"name": "audit_log_id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6864011, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__audit_log_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__audit_log_tmp')),\n staging_columns=get_audit_log_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n select \n cast(id as {{ dbt.type_string() }}) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}, {"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.zendesk_source.get_audit_log_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__audit_log.sql", "compiled": true, "compiled_code": "\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n action\n \n as \n \n action\n \n, \n \n \n actor_id\n \n as \n \n actor_id\n \n, \n \n \n change_description\n \n as \n \n change_description\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n source_id\n \n as \n \n source_id\n \n, \n \n \n source_label\n \n as \n \n source_label\n \n, \n \n \n source_type\n \n as \n \n source_type\n \n\n\n\n \n from base\n),\n\nfinal as (\n select \n cast(id as TEXT) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as timestamp) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__domain_name.sql", "original_file_path": "models/stg_zendesk__domain_name.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "fqn": ["zendesk_source", "stg_zendesk__domain_name"], "alias": "stg_zendesk__domain_name", "checksum": {"name": "sha256", "checksum": "8c3a4735e0cdea5a463eefc3c6820d15d622857af45dab942410dc64a0ac4bda"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.6891232, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__domain_name_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__domain_name_tmp')),\n staging_columns=get_domain_name_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}, {"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_domain_name_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__domain_name.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization_tag.sql", "original_file_path": "models/stg_zendesk__organization_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "fqn": ["zendesk_source", "stg_zendesk__organization_tag"], "alias": "stg_zendesk__organization_tag", "checksum": {"name": "sha256", "checksum": "15f1f4014e4ba78ae7992f28c61e3926b7cd12c6bb32efc7b516db93c1e20d82"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.689887, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tag_tmp')),\n staging_columns=get_organization_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket.sql", "original_file_path": "models/stg_zendesk__ticket.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket", "fqn": ["zendesk_source", "stg_zendesk__ticket"], "alias": "stg_zendesk__ticket", "checksum": {"name": "sha256", "checksum": "8a1201482d9f933a720698fa97c33d1499d5aeeaecd3706d97b3864b54eea531"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], 
"description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.687258, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tmp')),\n staging_columns=get_ticket_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, 
"extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.407951, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zz_zendesk\".\"daylight_time_data\" as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": "606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4207711, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zz_zendesk\".\"user_data\" as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ 
var('directed_schema','dev') }}"}, "created_at": 1728492761.4243689, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"group_data\" as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.427995, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n 
\"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_data\" as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4331148, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"brand_data\" as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", 
"stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.437031, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_tag_data\" as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "caed8406693ab67a1ae858708ab0e22185d3c333ece3db5602b527bfeed8863e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4410582, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"", "raw_code": "--To 
disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_holidays'])) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_holiday_data\" as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4449952, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your 
dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"user_tag_data\" as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.4503849, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\" as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", 
"checksum": {"name": "sha256", "checksum": "0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4543612, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_form_history_data\" as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": "stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": "756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, 
"tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.458412, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_comment_data\" as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.461889, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, 
"compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"organization_tag_data\" as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__audit_log_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__audit_log_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__audit_log_tmp"], "alias": "stg_zendesk__audit_log_tmp", "checksum": {"name": "sha256", "checksum": "875185f07856608bdc8129d3ad2cef7ff5dfc2acdf252146ea493a69c889cfed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4661481, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"", "raw_code": "{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }}\n\nselect {{ dbt_utils.star(source('zendesk','audit_log')) }} \nfrom {{ source('zendesk','audit_log') }} as audit_log_table", "language": "sql", "refs": [], "sources": [["zendesk", "audit_log"], ["zendesk", "audit_log"]], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.enabled_vars", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.audit_log"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__audit_log_tmp.sql", "compiled": true, "compiled_code": "\n\nselect \"id\",\n \"_fivetran_synced\",\n \"action\",\n \"actor_id\",\n \"change_description\",\n \"created_at\",\n \"source_id\",\n \"source_label\",\n \"source_type\" \nfrom \"postgres\".\"zz_zendesk\".\"audit_log_data\" as audit_log_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": 
"models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.470576, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_data\" as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, 
"contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728492761.474454, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zz_zendesk\".\"organization_data\" as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.4780102, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ 
dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__domain_name_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.48525, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, 
"compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zz_zendesk\".\"domain_name_data\" as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728492761.489658, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zz_zendesk\".\"time_zone_data\" as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": 
"test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", "unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.66899, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.670188, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": 
"\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": "unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.671063, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies", "test_metadata": {"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, 
"materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.672031, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.672837, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": 
"model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": "unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.7014642, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, 
"unrendered_config": {}, "created_at": 1728492761.702423, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.703265, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", 
"resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": "not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.704643, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.705457, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, 
"compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.706281, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": 
{"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.707086, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.708081, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by 
organization_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.708991, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": 
null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.709823, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.710643, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect 
ticket_comment_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.711457, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": 
"ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.7123241, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.71314, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": 
"models.stg_zendesk__ticket_form_history", "attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1728492761.713957, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time", "test_metadata": {"name": "unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": 
"models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": "unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.726451, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.727293, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": 
"target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.728309, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": 
"test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", "not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728492761.729172, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.audit_log", "fqn": ["zendesk_source", "zendesk", "audit_log"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "audit_log_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"id": {"name": "id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "created_at": 1728492761.798353}, "source.zendesk_source.zendesk.ticket": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. 
Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "created_at": 1728492761.799576}, "source.zendesk_source.zendesk.brand": {"database": "postgres", "schema": "zz_zendesk", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "created_at": 1728492761.799726}, "source.zendesk_source.zendesk.domain_name": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "created_at": 1728492761.79984}, "source.zendesk_source.zendesk.group": {"database": "postgres", "schema": "zz_zendesk", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. 
Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "created_at": 1728492761.8000891}, "source.zendesk_source.zendesk.organization_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "created_at": 1728492761.800256}, "source.zendesk_source.zendesk.organization": {"database": "postgres", "schema": "zz_zendesk", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. 
Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name": "shared_tickets", "description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "created_at": 1728492761.800394}, "source.zendesk_source.zendesk.ticket_comment": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": 
{"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "created_at": 1728492761.800514}, "source.zendesk_source.zendesk.user_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "created_at": 1728492761.8006241}, "source.zendesk_source.zendesk.user": {"database": "postgres", "schema": "zz_zendesk", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users: end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization membership, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. 
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "created_at": 1728492761.800898}, "source.zendesk_source.zendesk.schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "created_at": 1728492761.801013}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "created_at": 1728492761.801111}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": 
[], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "created_at": 1728492761.801229}, "source.zendesk_source.zendesk.ticket_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "created_at": 1728492761.801322}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "created_at": 1728492761.8014138}, "source.zendesk_source.zendesk.daylight_time": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "created_at": 1728492761.801513}, "source.zendesk_source.zendesk.time_zone": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "created_at": 1728492761.801599}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "created_at": 1728492761.801696}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.49316, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4935522, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": 
"postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.493763, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.493977, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.494117, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.494239, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as 
table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.495817, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4961832, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n 
pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.49685, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.4969769, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n 
{{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5063639, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.506926, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507224, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507515, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.507949, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.508363, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5085282, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, 
auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.508847, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.509205, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.50998, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510172, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510478, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.510738, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ 
magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.511141, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.511352, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512009, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512244, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1728492759.512365, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5125399, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.512681, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.513246, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and 
DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5140438, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514205, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514507, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514643, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.514931, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.515996, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }}\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\t{{ ';' if not loop.last else \"\" }}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.516549, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.516858, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}{{ ';' if not loop.last else \"\" }}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.517309, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5174599, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5181599, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728492759.518333, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.518466, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5190039, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5191748, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.519386, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ 
limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.520003, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.523454, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.523631, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.524121, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5245068, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525536, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525721, "supported_languages": null}, 
"macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525861, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.525996, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.526128, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5264902, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5267699, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set 
config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527054, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527479, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.527742, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.531219, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.53138, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5315871, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532268, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532425, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.532585, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.534254, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.535687, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5401, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.54038, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540538, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540624, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5407622, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.540875, "supported_languages": 
null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.541074, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5419059, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542086, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542321, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.542722, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.549417, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.552064, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.55318, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.553589, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.553868, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.554508, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5549939, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", 
"macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5553868, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set expected_sql = config.get('expected_sql') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n {%- endfor -%}\n\n {% if not expected_sql %}\n {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}\n {% endif %}\n {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_expected_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.557184, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = 
materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.562051, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.56243, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.562669, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": 
"macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5642319, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5645282, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro 
materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.565182, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.568099, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.571157, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.572953, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.573513, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.574379, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5746582, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if 
merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5754151, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.582355, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n 
{% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5842829, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.584575, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.585524, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 
'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.585789, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5864162, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5870569, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.588054, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.588468, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.588723, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589113, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589359, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.5897012, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.589896, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590218, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590416, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590573, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.590868, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.596176, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.601871, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.603025, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.604406, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6054149, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.605717, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.605846, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.606185, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6063302, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.610382, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
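On adapters where `can_clone_table()` returns true and the deferred relation is a table, the clone materialization above compiles to a single zero-copy statement; on adapters like Postgres (whose default returns false) it falls back to the view materialization instead. The relation names below are illustrative:

```sql
-- what create_or_replace_clone renders on a clone-capable warehouse
create or replace table analytics.dev_jdoe.fct_orders
    clone analytics.prod.fct_orders;
```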
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6135008, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.618803, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
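The seed materialization above either creates the CSV table fresh or truncates it on re-runs, then loads rows in batches. The rough shape of what it compiles to on the INSERT path is sketched below; the schema, table, and columns are hypothetical, and `%s` stands in for the driver-level bindings returned by `get_binding_char`:

```sql
create table dev_schema.country_codes (
    country_code varchar,
    country_name varchar
);

insert into dev_schema.country_codes (country_code, country_name) values
    (%s, %s),
    (%s, %s);
```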
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6196392, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6198661, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6203258, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.620508, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n 
-- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6206348, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.62077, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.620883, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621034, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621147, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
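`get_batch_size` above dispatches to the adapter and defaults to 10,000 rows per INSERT statement. A project that needs smaller statements (for example, to stay under a driver's bind-parameter limit) could shadow the dispatched default; this is a sketch only, and assumes the project is listed in the `dispatch` search order for the `dbt` namespace:

```sql
-- hypothetical override of the seed batch size
{% macro default__get_batch_size() %}
    {{ return(2500) }}
{% endmacro %}
```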
"created_at": 1728492759.621593, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.621768, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.622976, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6234188, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name 
~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.623789, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.624575, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.625002, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6253288, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6257641, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.626014, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.626712, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6270661, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.627254, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6274502, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.627697, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.628494, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so 
just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.629764, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630202, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630513, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.630798, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ 
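When an atomic `create or replace` is unavailable but both relations can be renamed, `default__get_replace_sql` above stages the new relation, swaps it in via a backup, then drops the backup. The emitted SQL sequence looks roughly like this, where `__dbt_tmp` and `__dbt_backup` are dbt's standard intermediate and backup suffixes and the table name is illustrative:

```sql
create table analytics.my_model__dbt_tmp as (select 1 as id);
alter table analytics.my_model rename to my_model__dbt_backup;
alter table analytics.my_model__dbt_tmp rename to my_model;
drop table if exists analytics.my_model__dbt_backup cascade;
```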
adapter.drop_schema(schema_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6310048, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6313112, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.631505, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.632189, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6326, "supported_languages": null}, 
"macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.632799, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.633064, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.633398, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.633657, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro 
-%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6341162, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6347609, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.635179, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.635405, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.635669, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.635825, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.636132, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.636371, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.636679, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro 
default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.636814, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.637085, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.637234, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.637841, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ 
exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.638026, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6383061, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.638447, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.638721, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.638855, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6398232, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.639939, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.640448, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": 
"macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6406028, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6407318, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.642003, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6423628, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.642689, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1728492759.6429288, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.643029, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6432881, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.643429, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.643687, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.643827, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.644872, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.645076, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6455412, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.646251, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": 
"default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6467059, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.646899, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.647072, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.647342, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.647542, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.648458, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.648632, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.649888, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6500978, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.650317, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.650592, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.650741, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.65115, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6513128, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6514869, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.651891, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.652242, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.652525, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6527622, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.653304, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for 
node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.654923, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.655521, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.655814, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.658012, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.659348, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.660094, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.660334, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.660579, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6606998, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.661471, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6621141, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.662372, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6627579, "supported_languages": null}, "macro.dbt.date": {"name": "date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(adapter.dispatch('date', 'dbt') (year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.663121, "supported_languages": null}, "macro.dbt.default__date": {"name": "default__date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.default__date", "macro_sql": "{% macro default__date(year, month, day) -%}\n {%- set dt = modules.datetime.date(year, month, day) -%}\n {%- set iso_8601_formatted_date = dt.strftime('%Y-%m-%d') -%}\n to_date('{{ iso_8601_formatted_date }}', 'YYYY-MM-DD')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6633978, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", 
"macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.663717, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.663875, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.664114, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.664233, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.665059, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.665478, "supported_languages": 
null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66567, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.666178, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6664371, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.666548, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66705, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": 
"default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667308, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667527, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6676009, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66785, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.667985, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1728492759.668266, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.668397, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.66903, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.669419, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6697412, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6698968, "supported_languages": null}, "macro.dbt.safe_cast": 
{"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670167, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670298, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670544, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.670696, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6709309, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671083, 
"supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671349, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671479, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.671713, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6718779, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672158, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67229, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672529, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.672632, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6735868, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.673737, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.673901, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674048, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6742, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6743429, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674499, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674668, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.674828, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6749709, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.675122, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.675261, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6754181, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6755562, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6758258, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67605, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676512, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676634, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.676973, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6772292, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.677367, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1728492759.677878, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678037, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678268, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678538, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.678667, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679028, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": 
"macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.67926, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679528, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.679659, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6800241, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680201, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.68035, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680524, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.680992, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681362, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6815, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681597, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", 
"unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68175, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681823, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.681977, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.682136, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.682944, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", 
"unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683072, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683221, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683594, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.683771, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6838999, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ 
adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.684048, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.684166, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6862001, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.686419, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68673, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = 
base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.687089, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6875288, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.687926, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688137, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688313, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": 
"macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.688641, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.68929, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.689536, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.689684, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69011, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6905012, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6907809, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.690999, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69268, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.692799, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.692959, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6930728, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.693405, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69359, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69369, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6939049, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694165, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694384, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.694565, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6947808, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.695423, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.695601, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6958332, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.69605, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.697414, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6981308, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6983428, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6984901, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.6992362, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.699425, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.6996422, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.699817, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7000918, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.700754, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.703618, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.703875, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7040808, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.70433, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704509, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704662, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.704832, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705066, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705257, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n 
{% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7055418, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705714, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.705868, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7060218, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706167, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706562, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.706748, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709098, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709257, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7096279, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.709835, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.71003, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.710201, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711315, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711643, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.711821, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712151, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712368, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.712924, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7131672, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.713896, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = 
adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this ~ \" because the relation doesn't exist\") }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.717149, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7179022, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n 
\"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for \" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7190862, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.720931, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.72108, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.721858, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.722241, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.722791, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND 
----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.723232, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.723302, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7238212, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.724055, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.724342, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1728492759.72462, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7249548, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.725408, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.725869, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.72662, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.726998, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.727325, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7284968, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.729648, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7306361, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. 
full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.731723, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.73248, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7328472, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", 
"unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.733557, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.734348, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.734787, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as 
filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.735214, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.735799, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.736253, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7370298, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7374878, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.738166, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' 
,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n {% if group_by_columns|length() == 0 %}\n where {{ column_name }} is not null\n limit 1\n {% endif %}\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.739041, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.73974, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.740489, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": 
"macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.741108, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7414489, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7418408, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1728492759.742188, "supported_languages": null}, "macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.742846, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as {{ dbt.type_numeric() }}) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.743659, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7445571, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro 
default__test_sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n{% set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.745416, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns, exclude_columns, precision)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7476249, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n\n{%- if compare_columns and exclude_columns -%}\n {{ exceptions.raise_compiler_error(\"Both a compare and an ignore list were provided to the `equality` macro. Only one is allowed\") }}\n{%- endif -%}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{# Ensure there are no extra columns in the compare_model vs model #}\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- do dbt_utils._is_ephemeral(compare_model, 'test_equality') -%}\n\n {%- set model_columns = adapter.get_columns_in_relation(model) -%}\n {%- set compare_model_columns = adapter.get_columns_in_relation(compare_model) -%}\n\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- set include_model_columns = [] %}\n {%- for column in model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n {%- for column in compare_model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_model_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns_set = set(include_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(include_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- else -%}\n {%- set compare_columns_set = set(model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(compare_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- endif -%}\n\n {% if compare_columns_set != compare_model_columns_set %}\n {{ exceptions.raise_compiler_error(compare_model ~\" has less columns than \" ~ model ~ \", please ensure they have the same columns or use the `compare_columns` or `exclude_columns` arguments to subset them.\") }}\n {% endif %}\n\n\n{% endif %}\n\n{%- if not precision -%}\n {%- if not compare_columns -%}\n {# \n You cannot get the columns in an ephemeral model (due to not existing in the information schema),\n so if the user does not provide an explicit list of columns we must error in the case it is ephemeral\n #}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model)-%}\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- for column in compare_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns = include_columns | map(attribute='quoted') %}\n {%- else -%} {# Compare columns provided #}\n {%- set compare_columns = compare_columns | map(attribute='quoted') %}\n {%- endif -%}\n {%- endif -%}\n\n {% set compare_cols_csv = compare_columns | join(', ') %}\n\n{% else %} {# Precision required #}\n {#-\n If rounding is required, we need to get the types, so it cannot be ephemeral even if they provide column names\n -#}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set columns = adapter.get_columns_in_relation(model) -%}\n\n {% set columns_list = [] %}\n {%- for col in columns -%}\n {%- if (\n (col.name|lower in compare_columns|map('lower') or not compare_columns) and\n (col.name|lower not in exclude_columns|map('lower') or not exclude_columns)\n ) -%}\n {# 
Databricks double type is not picked up by any number type checks in dbt #}\n {%- if col.is_float() or col.is_numeric() or col.data_type == 'double' -%}\n {# Cast is required due to postgres not having round for a double precision number #}\n {%- do columns_list.append('round(cast(' ~ col.quoted ~ ' as ' ~ dbt.type_numeric() ~ '),' ~ precision ~ ') as ' ~ col.quoted) -%}\n {%- else -%} {# Non-numeric type #}\n {%- do columns_list.append(col.quoted) -%}\n {%- endif -%}\n {% endif %}\n {%- endfor -%}\n\n {% set compare_cols_csv = columns_list | join(', ') %}\n\n{% endif %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_numeric", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.751712, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7522519, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.752539, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", 
"macro_sql": "{% test mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.756105, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last 
record.\n coalesce(\n upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.757956, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7583182, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.75849, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.758945, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759223, "supported_languages": null}, 
"macro.dbt_utils.default__pretty_time": {"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759409, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.759661, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7598348, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{% if not string %}\n{{ return('') }}\n{% endif %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.760593, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro 
}}` macro cannot be used with ephemeral models, as it relies on the information schema.\n\n`{{ model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7614138, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.762176, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.762826, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.763065, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n 
)}}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.763436, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7638478, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.764514, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7648509, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.765196, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7659352, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.766989, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.768021, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7684429, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.768638, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not 
loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7691898, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.769943, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.770859, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.771272, "supported_languages": null}, 
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.77155, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.772904, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.774365, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name, quote_identifiers)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.775907, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n {%- set current_col_name = adapter.quote(col.column) if quote_identifiers else col.column -%}\n select\n {%- for exclude_col in exclude %}\n {{ adapter.quote(exclude_col) if quote_identifiers else exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ adapter.quote(field_name) if quote_identifiers else field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(current_col_name) }}\n {% else %}\n {{ current_col_name }}\n {% endif %}\n as {{ cast_to }}) as {{ 
adapter.quote(value_name) if quote_identifiers else value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.777925, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7782779, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7784162, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.781806, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.785173, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7854989, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7858539, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.postgres__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.786747, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7869592, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }} as tt\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{% endmacro %}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.787272, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7875688, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.78778, "supported_languages": null}, "macro.dbt_utils.databricks__deduplicate": {"name": "databricks__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.databricks__deduplicate", "macro_sql": "\n{%- macro databricks__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.787955, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.788131, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, 
*varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.788575, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by \\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7888231, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.78927, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.789786, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.790112, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.79042, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.793671, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.794013, "supported_languages": null}, "macro.dbt_utils.redshift__get_tables_by_pattern_sql": {"name": "redshift__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.redshift__get_tables_by_pattern_sql", "macro_sql": "{% macro redshift__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% set sql %}\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from \"{{ database }}\".\"information_schema\".\"tables\"\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n union all\n select distinct\n schemaname as {{ adapter.quote('table_schema') }},\n tablename as {{ adapter.quote('table_name') }},\n 'external' as {{ adapter.quote('table_type') }}\n from svv_external_tables\n where redshift_database_name = '{{ database }}'\n and schemaname ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n {% endset %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.794935, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7956128, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro 
_bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7960792, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.79733, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": "default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. 
Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.7990458, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.800281, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.801087, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.801541, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {{ return('') }}\n {% endif %}\n\n {%- set include_cols = [] %}\n {%- set cols = adapter.get_columns_in_relation(from) -%}\n {%- set except = except | map(\"lower\") | list %}\n {%- for col in cols -%}\n {%- if col.column|lower not in except -%}\n {% do include_cols.append(col.column) %}\n {%- endif %}\n {%- endfor %}\n\n {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.802216, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8029718, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803404, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": 
"macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803582, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n {{ return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.803956, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8045151, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8049548, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": 
"\n\n{%- macro default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.805551, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.postgres__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806079, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8062139, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806346, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n 
case table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.806481, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8069718, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. 
Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.808026, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.809209, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.809483, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8100202, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8107688, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.815974, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in 
get_tables(table_regex_pattern) %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.816612, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.817649, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.818435, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for table 
in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8193111, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8202581, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.820436, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.82054, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8233302, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}})))\n else ceil(months_between(date({{second_date}}), date({{first_date}})))\n end\n \n -- did we cross a month boundary?\n + case\n when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n else 0 end\n \n {%- elif datepart == 'quarter' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n end\n \n -- did we cross a quarter boundary?\n + case\n when {{first_date}} < {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then 1\n when {{first_date}} > {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then -1\n else 0 end\n\n {%- elif datepart == 'year' -%}\n \n year({{second_date}}) - year({{first_date}})\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set divisor -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n case when {{first_date}} < {{second_date}}\n then ceil((\n {# make sure the timestamps are real, otherwise raise an error asap #}\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n else floor((\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n end\n \n {% if datepart == 'millisecond' %}\n + cast(date_format({{second_date}}, 'SSS') as int)\n - cast(date_format({{first_date}}, 'SSS') as int)\n {% endif %}\n \n {% if datepart == 'microsecond' %} \n {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n -- Spark doesn't really support microseconds, so this is a massive hack!\n -- It will only work if the timestamp-string is of the format\n -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int) \n {% endif %}\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.83119, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8313348, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.831409, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.831959, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.833505, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.833824, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8340852, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834341, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834645, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.834836, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.835028, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.844357, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.846505, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n 
{{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\n\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8489652, "supported_languages": null}, "macro.dbt_date.get_base_dates": {"name": "get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.get_base_dates", "macro_sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_base_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.850685, "supported_languages": null}, "macro.dbt_date.default__get_base_dates": {"name": "default__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.default__get_base_dates", "macro_sql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.851424, "supported_languages": null}, "macro.dbt_date.bigquery__get_base_dates": {"name": "bigquery__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": 
"macro.dbt_date.bigquery__get_base_dates", "macro_sql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as datetime )\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as datetime )\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.852235, "supported_languages": null}, "macro.dbt_date.trino__get_base_dates": {"name": "trino__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.trino__get_base_dates", "macro_sql": "{% macro trino__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.now()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.now", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.852952, "supported_languages": null}, "macro.dbt_date.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.853652, "supported_languages": null}, "macro.dbt_date.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, 
end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854202, "supported_languages": null}, "macro.dbt_date.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854419, "supported_languages": null}, "macro.dbt_date.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{\n dbt_date.generate_series(\n dbt_date.get_intervals_between(start_date, end_date, datepart)\n )\n }}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"(row_number() over (order by 1) - 1)\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.generate_series", "macro.dbt_date.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.854762, "supported_languages": null}, "macro.dbt_date.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.855572, "supported_languages": null}, "macro.dbt_date.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": 
"macro.dbt_date.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.855958, "supported_languages": null}, "macro.dbt_date.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8561332, "supported_languages": null}, "macro.dbt_date.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.856618, "supported_languages": null}, "macro.dbt_date.date": {"name": "date", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(modules.datetime.date(year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.856937, "supported_languages": null}, "macro.dbt_date.datetime": {"name": "datetime", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.datetime", "macro_sql": "{% macro datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tz=None) %}\n {% set tz = tz if tz else var(\"dbt_date:time_zone\") %}\n {{ return(\n modules.datetime.datetime(\n year=year, month=month, day=day, hour=hour,\n minute=minute, second=second, microsecond=microsecond,\n tzinfo=modules.pytz.timezone(tz)\n )\n ) 
}}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.857451, "supported_languages": null}, "macro.dbt_date.get_fiscal_year_dates": {"name": "get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.get_fiscal_year_dates", "macro_sql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_fiscal_year_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.859603, "supported_languages": null}, "macro.dbt_date.default__get_fiscal_year_dates": {"name": "default__get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.default__get_fiscal_year_dates", "macro_sql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\n-- this gets all the dates within a fiscal year\n-- determined by the given year-end-month\n-- ending on the saturday closest to that month's end date\nwith fsc_date_dimension as (\n select * from {{ dates }}\n),\nyear_month_end as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.month_end_date\n from\n fsc_date_dimension d\n where\n d.month_of_year = {{ year_end_month }}\n group by 1,2\n\n),\nweeks as (\n\n select\n d.year_number,\n d.month_of_year,\n d.date_day as week_start_date,\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\n from\n fsc_date_dimension d\n where\n d.day_of_week = {{ week_start_day }}\n\n),\n-- get all the weeks that start in the month the year ends\nyear_week_ends as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.week_end_date\n from\n weeks d\n where\n d.month_of_year = {{ year_end_month }}\n group by\n 1,2\n\n),\n-- then calculate which Saturday is closest to month end\nweeks_at_month_end as (\n\n select\n d.fiscal_year_number,\n d.week_end_date,\n m.month_end_date,\n rank() over\n (partition by d.fiscal_year_number\n order by\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\n\n ) as closest_to_month_end\n from\n year_week_ends d\n join\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\n),\nfiscal_year_range as (\n\n select\n w.fiscal_year_number,\n cast(\n {{ dbt.dateadd('day', 1,\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\n as date) as fiscal_year_start_date,\n w.week_end_date as fiscal_year_end_date\n from\n weeks_at_month_end w\n where\n w.closest_to_month_end = 1\n\n),\nfiscal_year_dates as (\n\n select\n d.date_day,\n m.fiscal_year_number,\n m.fiscal_year_start_date,\n m.fiscal_year_end_date,\n w.week_start_date,\n w.week_end_date,\n -- we reset the weeks of the year starting with the merch year start date\n dense_rank()\n over(\n partition by m.fiscal_year_number\n order by w.week_start_date\n ) as fiscal_week_of_year\n from\n fsc_date_dimension d\n join\n fiscal_year_range m on 
d.date_day between m.fiscal_year_start_date and m.fiscal_year_end_date\n join\n weeks w on d.date_day between w.week_start_date and w.week_end_date\n\n)\nselect * from fiscal_year_dates order by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.860286, "supported_languages": null}, "macro.dbt_date.get_fiscal_periods": {"name": "get_fiscal_periods", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_periods.sql", "original_file_path": "macros/fiscal_date/get_fiscal_periods.sql", "unique_id": "macro.dbt_date.get_fiscal_periods", "macro_sql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\n{#\nThis macro requires you to pass in a ref to a date dimension, created via\ndbt_date.get_date_dimension()s\n#}\nwith fscl_year_dates_for_periods as (\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\n),\nfscl_year_w13 as (\n\n select\n f.*,\n -- We count the weeks in a 13 week period\n -- and separate the 4-5-4 week sequences\n mod(cast(\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\n ), 13) as w13_number,\n -- Chop weeks into 13 week merch quarters\n cast(\n least(\n floor((f.fiscal_week_of_year-1)/13.0)\n , 3)\n as {{ dbt.type_int() }}) as quarter_number\n from\n fscl_year_dates_for_periods f\n\n),\nfscl_periods as (\n\n select\n f.date_day,\n f.fiscal_year_number,\n f.week_start_date,\n f.week_end_date,\n f.fiscal_week_of_year,\n case\n -- we move week 53 into the 3rd period of the quarter\n when f.fiscal_week_of_year = 53 then 3\n when f.w13_number between 0 and 3 then 1\n when f.w13_number between 4 and 8 then 2\n when f.w13_number between 9 and 12 then 3\n end as period_of_quarter,\n f.quarter_number\n from\n fscl_year_w13 f\n\n),\nfscl_periods_quarters as (\n\n select\n f.*,\n cast((\n (f.quarter_number * 3) + f.period_of_quarter\n ) as {{ dbt.type_int() }}) as fiscal_period_number\n from\n fscl_periods f\n\n)\nselect\n date_day,\n fiscal_year_number,\n week_start_date,\n week_end_date,\n fiscal_week_of_year,\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\n fiscal_period_number,\n quarter_number+1 as fiscal_quarter_number,\n period_of_quarter as fiscal_period_of_quarter\nfrom\n fscl_periods_quarters\norder by 1,2\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_fiscal_year_dates", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8613272, "supported_languages": null}, "macro.dbt_date.tomorrow": {"name": "tomorrow", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/tomorrow.sql", "original_file_path": "macros/calendar_date/tomorrow.sql", "unique_id": "macro.dbt_date.tomorrow", "macro_sql": "{%- macro tomorrow(date=None, tz=None) -%}\n{{ dbt_date.n_days_away(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8615942, "supported_languages": null}, "macro.dbt_date.next_week": {"name": "next_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_week.sql", 
"original_file_path": "macros/calendar_date/next_week.sql", "unique_id": "macro.dbt_date.next_week", "macro_sql": "{%- macro next_week(tz=None) -%}\n{{ dbt_date.n_weeks_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8617868, "supported_languages": null}, "macro.dbt_date.next_month_name": {"name": "next_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_name.sql", "original_file_path": "macros/calendar_date/next_month_name.sql", "unique_id": "macro.dbt_date.next_month_name", "macro_sql": "{%- macro next_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.862169, "supported_languages": null}, "macro.dbt_date.next_month": {"name": "next_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month.sql", "original_file_path": "macros/calendar_date/next_month.sql", "unique_id": "macro.dbt_date.next_month", "macro_sql": "{%- macro next_month(tz=None) -%}\n{{ dbt_date.n_months_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.862371, "supported_languages": null}, "macro.dbt_date.day_name": {"name": "day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.day_name", "macro_sql": "{%- macro day_name(date, short=True) -%}\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863244, "supported_languages": null}, "macro.dbt_date.default__day_name": {"name": "default__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.default__day_name", "macro_sql": "\n\n{%- macro default__day_name(date, short) -%}\n{%- set f = 'Dy' if short else 'Day' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863463, "supported_languages": null}, "macro.dbt_date.snowflake__day_name": {"name": "snowflake__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.snowflake__day_name", "macro_sql": "\n\n{%- macro snowflake__day_name(date, short) -%}\n {%- if short -%}\n dayname({{ date }})\n {%- else -%}\n -- long version not implemented on Snowflake so we're doing it manually :/\n case dayname({{ date }})\n when 'Mon' then 'Monday'\n when 'Tue' then 'Tuesday'\n when 
'Wed' then 'Wednesday'\n when 'Thu' then 'Thursday'\n when 'Fri' then 'Friday'\n when 'Sat' then 'Saturday'\n when 'Sun' then 'Sunday'\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.863741, "supported_languages": null}, "macro.dbt_date.bigquery__day_name": {"name": "bigquery__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.bigquery__day_name", "macro_sql": "\n\n{%- macro bigquery__day_name(date, short) -%}\n{%- set f = '%a' if short else '%A' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8639429, "supported_languages": null}, "macro.dbt_date.postgres__day_name": {"name": "postgres__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.postgres__day_name", "macro_sql": "\n\n{%- macro postgres__day_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMDy' if short else 'FMDay' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8641331, "supported_languages": null}, "macro.dbt_date.duckdb__day_name": {"name": "duckdb__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.duckdb__day_name", "macro_sql": "\n\n{%- macro duckdb__day_name(date, short) -%}\n {%- if short -%}\n substr(dayname({{ date }}), 1, 3)\n {%- else -%}\n dayname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.864307, "supported_languages": null}, "macro.dbt_date.spark__day_name": {"name": "spark__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.spark__day_name", "macro_sql": "\n\n{%- macro spark__day_name(date, short) -%}\n{%- set f = 'E' if short else 'EEEE' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8644931, "supported_languages": null}, "macro.dbt_date.trino__day_name": {"name": "trino__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.trino__day_name", "macro_sql": "\n\n{%- macro trino__day_name(date, short) -%}\n{%- set f = 'a' if short else 'W' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.864685, "supported_languages": null}, "macro.dbt_date.to_unixtimestamp": {"name": "to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.to_unixtimestamp", "macro_sql": "{%- macro to_unixtimestamp(timestamp) -%}\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__to_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865084, "supported_languages": null}, "macro.dbt_date.default__to_unixtimestamp": {"name": "default__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__to_unixtimestamp", "macro_sql": "\n\n{%- macro default__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8652182, "supported_languages": null}, "macro.dbt_date.snowflake__to_unixtimestamp": {"name": "snowflake__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__to_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8653498, "supported_languages": null}, "macro.dbt_date.bigquery__to_unixtimestamp": {"name": "bigquery__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__to_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\n unix_seconds({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.86545, "supported_languages": null}, "macro.dbt_date.spark__to_unixtimestamp": {"name": "spark__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.spark__to_unixtimestamp", "macro_sql": "\n\n{%- macro spark__to_unixtimestamp(timestamp) -%}\n unix_timestamp({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865546, "supported_languages": null}, "macro.dbt_date.trino__to_unixtimestamp": {"name": "trino__to_unixtimestamp", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__to_unixtimestamp", "macro_sql": "\n\n{%- macro trino__to_unixtimestamp(timestamp) -%}\n to_unixtime({{ timestamp }} AT TIME ZONE 'UTC')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8656409, "supported_languages": null}, "macro.dbt_date.n_days_away": {"name": "n_days_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_away.sql", "original_file_path": "macros/calendar_date/n_days_away.sql", "unique_id": "macro.dbt_date.n_days_away", "macro_sql": "{%- macro n_days_away(n, date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.865872, "supported_languages": null}, "macro.dbt_date.week_start": {"name": "week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.week_start", "macro_sql": "{%- macro week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.866352, "supported_languages": null}, "macro.dbt_date.default__week_start": {"name": "default__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.default__week_start", "macro_sql": "{%- macro default__week_start(date) -%}\ncast({{ dbt.date_trunc('week', date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8664918, "supported_languages": null}, "macro.dbt_date.snowflake__week_start": {"name": "snowflake__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.snowflake__week_start", "macro_sql": "\n\n{%- macro snowflake__week_start(date) -%}\n {#\n Get the day of week offset: e.g. 
if the date is a Sunday,\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\n #}\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.day_of_week", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.866833, "supported_languages": null}, "macro.dbt_date.postgres__week_start": {"name": "postgres__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.postgres__week_start", "macro_sql": "\n\n{%- macro postgres__week_start(date) -%}\n-- Sunday as week start date\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8670778, "supported_languages": null}, "macro.dbt_date.duckdb__week_start": {"name": "duckdb__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.duckdb__week_start", "macro_sql": "\n\n{%- macro duckdb__week_start(date) -%}\n{{ return(dbt_date.postgres__week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.867218, "supported_languages": null}, "macro.dbt_date.iso_week_start": {"name": "iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.iso_week_start", "macro_sql": "{%- macro iso_week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868084, "supported_languages": null}, "macro.dbt_date._iso_week_start": {"name": "_iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date._iso_week_start", "macro_sql": "{%- macro _iso_week_start(date, week_type) -%}\ncast({{ dbt.date_trunc(week_type, date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868285, "supported_languages": null}, "macro.dbt_date.default__iso_week_start": {"name": "default__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", 
"unique_id": "macro.dbt_date.default__iso_week_start", "macro_sql": "\n\n{%- macro default__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868448, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_start": {"name": "snowflake__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_start", "macro_sql": "\n\n{%- macro snowflake__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868599, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_start": {"name": "postgres__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.postgres__iso_week_start", "macro_sql": "\n\n{%- macro postgres__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868748, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_start": {"name": "duckdb__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_start", "macro_sql": "\n\n{%- macro duckdb__iso_week_start(date) -%}\n{{ return(dbt_date.postgres__iso_week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.868906, "supported_languages": null}, "macro.dbt_date.spark__iso_week_start": {"name": "spark__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.spark__iso_week_start", "macro_sql": "\n\n{%- macro spark__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8691032, "supported_languages": null}, "macro.dbt_date.trino__iso_week_start": {"name": "trino__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.trino__iso_week_start", "macro_sql": "\n\n{%- macro trino__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": 
{"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8692951, "supported_languages": null}, "macro.dbt_date.n_days_ago": {"name": "n_days_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_ago.sql", "original_file_path": "macros/calendar_date/n_days_ago.sql", "unique_id": "macro.dbt_date.n_days_ago", "macro_sql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{%- set n = n|int -%}\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.869802, "supported_languages": null}, "macro.dbt_date.last_week": {"name": "last_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_week.sql", "original_file_path": "macros/calendar_date/last_week.sql", "unique_id": "macro.dbt_date.last_week", "macro_sql": "{%- macro last_week(tz=None) -%}\n{{ dbt_date.n_weeks_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.869994, "supported_languages": null}, "macro.dbt_date.now": {"name": "now", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/now.sql", "original_file_path": "macros/calendar_date/now.sql", "unique_id": "macro.dbt_date.now", "macro_sql": "{%- macro now(tz=None) -%}\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.convert_timezone", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8701801, "supported_languages": null}, "macro.dbt_date.periods_since": {"name": "periods_since", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/periods_since.sql", "original_file_path": "macros/calendar_date/periods_since.sql", "unique_id": "macro.dbt_date.periods_since", "macro_sql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870424, "supported_languages": null}, "macro.dbt_date.today": {"name": "today", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/today.sql", "original_file_path": "macros/calendar_date/today.sql", "unique_id": "macro.dbt_date.today", "macro_sql": "{%- macro today(tz=None) -%}\ncast({{ dbt_date.now(tz) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870589, "supported_languages": null}, "macro.dbt_date.last_month": {"name": "last_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month.sql", "original_file_path": 
"macros/calendar_date/last_month.sql", "unique_id": "macro.dbt_date.last_month", "macro_sql": "{%- macro last_month(tz=None) -%}\n{{ dbt_date.n_months_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.870763, "supported_languages": null}, "macro.dbt_date.day_of_year": {"name": "day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.day_of_year", "macro_sql": "{%- macro day_of_year(date) -%}\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.871166, "supported_languages": null}, "macro.dbt_date.default__day_of_year": {"name": "default__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.default__day_of_year", "macro_sql": "\n\n{%- macro default__day_of_year(date) -%}\n {{ dbt_date.date_part('dayofyear', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.871304, "supported_languages": null}, "macro.dbt_date.postgres__day_of_year": {"name": "postgres__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.postgres__day_of_year", "macro_sql": "\n\n{%- macro postgres__day_of_year(date) -%}\n {{ dbt_date.date_part('doy', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8714392, "supported_languages": null}, "macro.dbt_date.redshift__day_of_year": {"name": "redshift__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.redshift__day_of_year", "macro_sql": "\n\n{%- macro redshift__day_of_year(date) -%}\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8716109, "supported_languages": null}, "macro.dbt_date.spark__day_of_year": {"name": "spark__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.spark__day_of_year", "macro_sql": "\n\n{%- macro spark__day_of_year(date) -%}\n dayofyear({{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728492759.871788, "supported_languages": null}, "macro.dbt_date.trino__day_of_year": {"name": "trino__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.trino__day_of_year", "macro_sql": "\n\n{%- macro trino__day_of_year(date) -%}\n {{ dbt_date.date_part('day_of_year', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8719249, "supported_languages": null}, "macro.dbt_date.round_timestamp": {"name": "round_timestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/round_timestamp.sql", "original_file_path": "macros/calendar_date/round_timestamp.sql", "unique_id": "macro.dbt_date.round_timestamp", "macro_sql": "{% macro round_timestamp(timestamp) %}\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8721569, "supported_languages": null}, "macro.dbt_date.from_unixtimestamp": {"name": "from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.from_unixtimestamp", "macro_sql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__from_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.874784, "supported_languages": null}, "macro.dbt_date.default__from_unixtimestamp": {"name": "default__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__from_unixtimestamp", "macro_sql": "\n\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp({{ epochs }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.87505, "supported_languages": null}, "macro.dbt_date.postgres__from_unixtimestamp": {"name": "postgres__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.postgres__from_unixtimestamp", "macro_sql": "\n\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n 
cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8753102, "supported_languages": null}, "macro.dbt_date.snowflake__from_unixtimestamp": {"name": "snowflake__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__from_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n {%- set scale = 0 -%}\n {%- elif format == \"milliseconds\" -%}\n {%- set scale = 3 -%}\n {%- elif format == \"microseconds\" -%}\n {%- set scale = 6 -%}\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp_ntz({{ epochs }}, {{ scale }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.875747, "supported_languages": null}, "macro.dbt_date.bigquery__from_unixtimestamp": {"name": "bigquery__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__from_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n timestamp_seconds({{ epochs }})\n {%- elif format == \"milliseconds\" -%}\n timestamp_millis({{ epochs }})\n {%- elif format == \"microseconds\" -%}\n timestamp_micros({{ epochs }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8761091, "supported_languages": null}, "macro.dbt_date.trino__from_unixtimestamp": {"name": "trino__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__from_unixtimestamp", "macro_sql": "\n\n{%- macro trino__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n cast(from_unixtime({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"milliseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 6)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"microseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 3)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"nanoseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8766668, "supported_languages": null}, "macro.dbt_date.duckdb__from_unixtimestamp": {"name": "duckdb__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.duckdb__from_unixtimestamp", "macro_sql": "\n\n\n{%- macro duckdb__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.876918, "supported_languages": null}, "macro.dbt_date.n_months_ago": {"name": "n_months_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_ago.sql", "original_file_path": "macros/calendar_date/n_months_ago.sql", "unique_id": "macro.dbt_date.n_months_ago", "macro_sql": "{%- macro n_months_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.87727, "supported_languages": null}, "macro.dbt_date.date_part": {"name": "date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.date_part", "macro_sql": "{% macro date_part(datepart, date) -%}\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.877603, "supported_languages": null}, "macro.dbt_date.default__date_part": {"name": "default__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.default__date_part", "macro_sql": "{% macro default__date_part(datepart, date) -%}\n date_part('{{ datepart }}', {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.877815, "supported_languages": null}, "macro.dbt_date.bigquery__date_part": {"name": "bigquery__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.bigquery__date_part", "macro_sql": "{% macro bigquery__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8779898, 
"supported_languages": null}, "macro.dbt_date.trino__date_part": {"name": "trino__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.trino__date_part", "macro_sql": "{% macro trino__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878127, "supported_languages": null}, "macro.dbt_date.n_weeks_away": {"name": "n_weeks_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_away.sql", "original_file_path": "macros/calendar_date/n_weeks_away.sql", "unique_id": "macro.dbt_date.n_weeks_away", "macro_sql": "{%- macro n_weeks_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878518, "supported_languages": null}, "macro.dbt_date.day_of_month": {"name": "day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.day_of_month", "macro_sql": "{%- macro day_of_month(date) -%}\n{{ dbt_date.date_part('day', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8787389, "supported_languages": null}, "macro.dbt_date.redshift__day_of_month": {"name": "redshift__day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.redshift__day_of_month", "macro_sql": "\n\n{%- macro redshift__day_of_month(date) -%}\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.878942, "supported_languages": null}, "macro.dbt_date.yesterday": {"name": "yesterday", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/yesterday.sql", "original_file_path": "macros/calendar_date/yesterday.sql", "unique_id": "macro.dbt_date.yesterday", "macro_sql": "{%- macro yesterday(date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.879159, "supported_languages": null}, "macro.dbt_date.day_of_week": {"name": "day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.day_of_week", "macro_sql": "{%- macro 
day_of_week(date, isoweek=true) -%}\n{{ adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.882108, "supported_languages": null}, "macro.dbt_date.default__day_of_week": {"name": "default__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.default__day_of_week", "macro_sql": "\n\n{%- macro default__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else {{ dow }}\n end\n {%- else -%}\n {{ dow }} + 1\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.882408, "supported_languages": null}, "macro.dbt_date.snowflake__day_of_week": {"name": "snowflake__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.snowflake__day_of_week", "macro_sql": "\n\n{%- macro snowflake__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'dayofweekiso' -%}\n {{ dbt_date.date_part(dow_part, date) }}\n {%- else -%}\n {%- set dow_part = 'dayofweek' -%}\n case\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\n else {{ dbt_date.date_part(dow_part, date) }} + 1\n end\n {%- endif -%}\n\n\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8828, "supported_languages": null}, "macro.dbt_date.bigquery__day_of_week": {"name": "bigquery__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.bigquery__day_of_week", "macro_sql": "\n\n{%- macro bigquery__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (1) to Monday (2)\n when {{ dow }} = 1 then 7\n else {{ dow }} - 1\n end\n {%- else -%}\n {{ dow }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883073, "supported_languages": null}, "macro.dbt_date.postgres__day_of_week": {"name": "postgres__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.postgres__day_of_week", "macro_sql": "\n\n\n{%- macro postgres__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'isodow' -%}\n -- Monday(1) to Sunday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\n {%- else -%}\n {%- set dow_part = 'dow' 
-%}\n -- Sunday(1) to Saturday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883479, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883832, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.883997, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.884205, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.884474, "supported_languages": null}, 
"macro.dbt_date.iso_week_end": {"name": "iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8849258, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885154, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885354, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.885514, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8859, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8865972, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.886806, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887115, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887398, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887707, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro 
duckdb__month_name(date, short) -%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.887923, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888222, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888424, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.888675, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.889104, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.88928, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8894238, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.889523, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8907678, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.890962, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": 
"macro.dbt_date.bigquery__convert_timezone", "macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8911061, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8913321, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8915122, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.891695, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.891853, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.892076, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.892406, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8930979, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893278, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893416, 
"supported_languages": null}, "macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893551, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893758, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.893895, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894026, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8941572, "supported_languages": null}, 
"macro.dbt_date.week_end": {"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894639, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894767, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.894957, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895152, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895287, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": 
"macros/calendar_date/next_month_number.sql", "unique_id": "macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895492, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.895694, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.896066, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897011, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897287, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": "{% 
macro redshift__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.897564, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8977559, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.8979032, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.898072, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1728492759.899037, "supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.899756, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90111, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.901348, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.901582, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not 
loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90181, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9020329, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.902285, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.902535, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90276, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903072, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903174, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903285, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903381, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.903732, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9044108, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", 
"original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.905568, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.90611, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.906854, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) 
}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907521, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907723, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.907857, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9079921, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.908143, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911305, 
"supported_languages": null}, "macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911476, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911635, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.911784, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', 
({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.941542, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.94254, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.942681, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.942951, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", 
"original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.943232, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.94336, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9434838, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.943609, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9437351, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 
'fivetran_utils') (union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9442198, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.944769, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9452648, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945462, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} 
ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945674, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.945919, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.947016, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.951387, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.951798, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.952193, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.95381, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.954347, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.954916, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955071, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955221, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.95539, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.955538, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9556842, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.956507, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.957946, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9587672, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9589472, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959111, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959275, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959442, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959622, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959878, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.959998, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9601128, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.960762, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9621708, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.962369, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.963255, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.96718, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.971964, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.973359, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9736981, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9738472, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974036, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.postgres__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9743621, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974487, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974599, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9746969, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", 
"unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.974793, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975043, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975143, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.975241, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9756238, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(False) 
}}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9759948, "supported_languages": null}, "macro.zendesk.regex_extract": {"name": "regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.regex_extract", "macro_sql": "{% macro regex_extract(string, day) -%}\n\n{{ adapter.dispatch('regex_extract', 'zendesk') (string, day) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.postgres__regex_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.976825, "supported_languages": null}, "macro.zendesk.default__regex_extract": {"name": "default__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.default__regex_extract", "macro_sql": "{% macro default__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977014, "supported_languages": null}, "macro.zendesk.bigquery__regex_extract": {"name": "bigquery__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.bigquery__regex_extract", "macro_sql": "{% macro bigquery__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977202, "supported_languages": null}, "macro.zendesk.snowflake__regex_extract": {"name": "snowflake__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.snowflake__regex_extract", "macro_sql": "{% macro snowflake__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.977466, "supported_languages": null}, "macro.zendesk.postgres__regex_extract": {"name": "postgres__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.postgres__regex_extract", "macro_sql": "{% macro postgres__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n (regexp_matches({{ string }}, {{ regex }}))[1]\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9778008, "supported_languages": null}, "macro.zendesk.redshift__regex_extract": 
{"name": "redshift__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.redshift__regex_extract", "macro_sql": "{% macro redshift__regex_extract(string, day) %}\n\n {% set regex = '\"' ~ day ~ '\"' ~ ':\\\\\\{([^\\\\\\}]*)\\\\\\}' -%}\n\n '{' || REGEXP_SUBSTR({{ string }}, '{{ regex }}', 1, 1, 'e') || '}'\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9780962, "supported_languages": null}, "macro.zendesk.spark__regex_extract": {"name": "spark__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.spark__regex_extract", "macro_sql": "{% macro spark__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" | replace(\"{\", \"\\\\\\{\") | replace(\"}\", \"\\\\\\}\") %}\n regexp_extract({{ string }}, {{ regex }}, 1)\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.978452, "supported_languages": null}, "macro.zendesk.coalesce_cast": {"name": "coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.coalesce_cast", "macro_sql": "{% macro coalesce_cast(column_list, datatype) -%}\n {{ return(adapter.dispatch('coalesce_cast', 'zendesk')(column_list, datatype)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__coalesce_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.978838, "supported_languages": null}, "macro.zendesk.default__coalesce_cast": {"name": "default__coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.default__coalesce_cast", "macro_sql": "{% macro default__coalesce_cast(column_list, datatype) %}\n coalesce(\n {%- for column in column_list %}\n cast({{ column }} as {{ datatype }})\n {%- if not loop.last -%},{%- endif -%}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.979129, "supported_languages": null}, "macro.zendesk.clean_schedule": {"name": "clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", "original_file_path": "macros/clean_schedule.sql", "unique_id": "macro.zendesk.clean_schedule", "macro_sql": "{% macro clean_schedule(column_name) -%}\n {{ return(adapter.dispatch('clean_schedule', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__clean_schedule"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9794052, "supported_languages": null}, "macro.zendesk.default__clean_schedule": {"name": "default__clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", 
"original_file_path": "macros/clean_schedule.sql", "unique_id": "macro.zendesk.default__clean_schedule", "macro_sql": "{% macro default__clean_schedule(column_name) -%}\n replace(replace(replace(replace(cast({{ column_name }} as {{ dbt.type_string() }}), '{', ''), '}', ''), '\"', ''), ' ', '')\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.97955, "supported_languages": null}, "macro.zendesk.count_tokens": {"name": "count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.count_tokens", "macro_sql": "{% macro count_tokens(column_name) -%}\n {{ return(adapter.dispatch('count_tokens', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__count_tokens"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.979813, "supported_languages": null}, "macro.zendesk.default__count_tokens": {"name": "default__count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.default__count_tokens", "macro_sql": "{% macro default__count_tokens(column_name) %}\n {{ dbt.length(column_name) }} / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9799511, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9805791, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": 
dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.981683, "supported_languages": null}, "macro.zendesk_source.get_audit_log_columns": {"name": "get_audit_log_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_audit_log_columns.sql", "original_file_path": "macros/get_audit_log_columns.sql", "unique_id": "macro.zendesk_source.get_audit_log_columns", "macro_sql": "{% macro get_audit_log_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"action\", \"datatype\": dbt.type_string()},\n {\"name\": \"actor_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"change_description\", \"datatype\": dbt.type_string()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"source_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"source_label\", \"datatype\": dbt.type_string()},\n {\"name\": \"source_type\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.982717, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9837399, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": 
dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.985162, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.985893, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.986357, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": {"name": "get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", 
\"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.987481, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9889228, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.989802, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", 
\"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.990656, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.994375, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": 
"macros/get_ticket_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492759.9995441, "supported_languages": null}, 
"macro.zendesk_source.get_ticket_field_history_columns": {"name": "get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.000344, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.000938, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_tickets\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.0023942, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.004552, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_file_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", 
\"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728492760.006764, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.409955, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "postgres", "schema": 
"zz_zendesk_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "b30a06ff7e3d392b2fdfa6b5f34633f6c7f8e018e31eef64fcdf2eeaffcae18a"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.427278, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\ndev as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\nfinal as (\n select \n prod.ticket_id as prod_ticket_id,\n dev.ticket_id as dev_ticket_id,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "bdad5490a4a975665c4b658101726f92c08755dd96f6372d8606b47e60fe29d4"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": 
"", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.431509, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select \n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nprod_not_in_dev as (\n -- rows from prod not found in dev\n select * from prod\n except distinct\n select * from dev\n),\n\ndev_not_in_prod as (\n -- rows from dev not found in prod\n select * from dev\n except distinct\n select * from prod\n),\n\nfinal as (\n select\n *,\n 'from prod' as source\n from prod_not_in_dev\n\n union all -- union since we only care if rows are produced\n\n select\n *,\n 'from dev' as source\n from dev_not_in_prod\n)\n\nselect *\nfrom final\n{{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.metrics_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "metrics_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/metrics_count_match.sql", "original_file_path": "tests/integrity/metrics_count_match.sql", "unique_id": "test.zendesk_integration_tests.metrics_count_match", "fqn": ["zendesk_integration_tests", "integrity", "metrics_count_match"], "alias": "metrics_count_match", "checksum": {"name": "sha256", "checksum": "a1b9b09d680906335f534a5707924cdd7975615c0f3192a51e790183e4625724"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.435719, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- check that all the tickets are accounted for in the metrics\nwith stg_count as (\n select\n count(*) as stg_ticket_count\n from {{ ref('stg_zendesk__ticket') }}\n),\n\nmetric_count as (\n select\n count(*) as metric_ticket_count\n from {{ ref('zendesk__ticket_metrics') }}\n)\n\nselect *\nfrom 
stg_count\njoin metric_count\n on stg_ticket_count != metric_ticket_count", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_metrics_parity": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_metrics_parity", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_metrics_parity.sql", "original_file_path": "tests/integrity/sla_metrics_parity.sql", "unique_id": "test.zendesk_integration_tests.sla_metrics_parity", "fqn": ["zendesk_integration_tests", "integrity", "sla_metrics_parity"], "alias": "sla_metrics_parity", "checksum": {"name": "sha256", "checksum": "d18407ef45d1ce6b2d4eeaca9286dfb8b3b1db85021e3fd69701fb0c33138675"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.439052, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n/*\nThis test is to ensure the sla_elapsed_time from zendesk__sla_policies matches the corresponding time in zendesk__ticket_metrics.\n*/\n\nwith dev_slas as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n where in_business_hours\n\n), dev_metrics as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n\n), dev_compare as (\n select \n dev_slas.ticket_id,\n dev_slas.metric,\n cast(dev_slas.sla_elapsed_time as {{ dbt.type_int() }}) as time_from_slas,\n case when metric = 'agent_work_time' then dev_metrics.agent_work_time_in_business_minutes\n when metric = 'requester_wait_time' then dev_metrics.requester_wait_time_in_business_minutes\n when metric = 'first_reply_time' then dev_metrics.first_reply_time_business_minutes\n end as time_from_metrics\n from dev_slas\n left join dev_metrics\n on dev_metrics.ticket_id = dev_slas.ticket_id\n)\n\nselect *\nfrom dev_compare\nwhere abs(time_from_slas - time_from_metrics) >= 5\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": 
"test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.443585, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n ticket_metrics.first_reply_time_business_minutes,\n sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728492760.447191, "relation_name": null, "raw_code": "{{ config(\n 
tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.5301702, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], 
"seed.zendesk_integration_tests.brand_data": [{"database": "postgres", "schema": "zz_zendesk", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1728492760.537546, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' 
}}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.539969, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728492760.5429592, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", 
"model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", 
"model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", "source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.zendesk__document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped", "model.zendesk.int_zendesk__ticket_document"], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.int_zendesk__ticket_comment_document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": 
["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_history", "model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], 
"model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["source.zendesk_source.zendesk.audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], 
"test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "source.zendesk_source.zendesk.audit_log": [], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], "source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": [], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], 
"seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": ["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__calendar_spine": 
["model.zendesk.int_zendesk__field_calendar_spine"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.zendesk__document": [], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__ticket_historical_group": 
["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_holiday", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", 
"test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk.int_zendesk__schedule_history"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__calendar_spine", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": 
["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "source.zendesk_source.zendesk.audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "source.zendesk_source.zendesk.ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], 
"source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}, "unit_tests": {}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.3", "generated_at": "2024-10-09T22:37:23.041694Z", "invocation_id": "85319b70-bc7d-461c-bfee-a3b90f57ada9", "env": {}, "project_name": "zendesk_integration_tests", "project_id": "b8a12ac1bacdf035438fc7646299ce11", "user_id": "8268eefe-e8f7-472e-ab2a-a92f0135d76d", "send_anonymous_usage_stats": true, "adapter_type": "postgres"}, "nodes": {"seed.zendesk_integration_tests.organization_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728513415.494187, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, 
"column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "user_id": "bigint", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1728513415.495407, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "schedule_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728513415.497876, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", "unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": 
[], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1728513415.50031, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "47c9244103b9a8dc25c5ce75693b8389df92258dde23dae71a09f021cf1b7ab7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "user_id": "bigint", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1728513415.502702, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.audit_log_data": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "audit_log_data.csv", "original_file_path": "seeds/audit_log_data.csv", "unique_id": "seed.zendesk_integration_tests.audit_log_data", "fqn": ["zendesk_integration_tests", "audit_log_data"], "alias": "audit_log_data", "checksum": {"name": "sha256", "checksum": 
"9979d1f37155833b5af3a3de6d9bcca8ac3143b8ecd59e32efca95a1b8e44b10"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728513415.503882, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "effe2837ec0ff3ec59fddc7fce0a5f4a6ff0a69daef5ae904244dcbf34425dae"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "bigint", "brand_id": "bigint", "external_id": "bigint", "forum_topic_id": "bigint", "group_id": "bigint", "organization_id": "bigint", "problem_id": "bigint", "requester_id": "bigint", "submitter_id": "bigint", "ticket_form_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", 
"requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1728513415.505106, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data_postgres": {"database": "postgres", "schema": "zz_zendesk", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", "brand_data_postgres"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": true, "alias": "brand_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1728513415.506379, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728513415.507603, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": "ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "schedule_id": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728513415.508768, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728513415.509921, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "9f600c24b84ed0183e88c5aaa4e7e02bd2228115bebc85217f04c97bd5b6dbc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728513415.5111098, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "end_time": "bigint", "start_time": "bigint", "end_time_utc": "bigint", 
"start_time_utc": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1728513415.512375, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1728513415.5135329, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, 
"group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728513415.514692, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": "a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1728513415.515876, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "postgres", "schema": "zz_zendesk", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, 
"database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1728513415.5170481, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1728513415.518254, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, 
"config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. 
The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requester's organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requester's organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.403866, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n select *\n from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n \n select *\n from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n latest_ticket_form.name as ticket_form_name,\n {% endif %}\n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n {% endif %}\n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n 
requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": 
null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n 
requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": "71977e3eeb4ea80a2beb205ad3dde4fb9aac17cb8391df9c39b854e658d124cb"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": "agent_wait_time_in_business_minutes", "description": "The combined number of business minutes 
the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, 
"assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requester's organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requester's organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score that went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate 
whether the ticket has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.418828, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 
'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n 
ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", 
"model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join 
ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 
1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith 
ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + 
\n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- 
Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where 
floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, 
schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n 
p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as 
solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments 
as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n 
ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((now())::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join 
ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n 
group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies 
the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard 
time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into 
calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', 
(cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n 
week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_business", "sql": " __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n 
status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as 
week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "sql": " 
__dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n 
select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": 
true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.4226298, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_summary\"", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not 
in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 
1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": "model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "2fea56dd7631d630021a96594da99a1b65affd7ec6d7a5a913ef3fc0b7759949"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728513416.40701, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if 
is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n 
select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || 
coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__sla_policies": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "7f12fd205228c0344bec4ae967a46c692bbede3209008a5648f86be4777550ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional sla breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is a SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achievement event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.40608, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n select *\n from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all \n\n select \n ticket_id,\n 
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n 
sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((now())::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (now())::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (now())::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (now())::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > now())\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": "546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, 
"unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513416.423084, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_backlog\"", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} 
--Standard ID field where the name can easily be joined from stg model.\n ,ticket_forms.name as ticket_form_name\n\n {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,organizations.name as organization_name\n\n {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,brands.name as brand_name\n\n {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,group_names.name as group_name\n\n {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.locale as local_name\n\n {% else %} --All other fields are not IDs and can simply be included in the query.\n ,ticket_field_history.{{ col }}\n {% endif %}\n {% endfor %}\n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join ticket_forms\n on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n left join group_names\n on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields are not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join users as requester\n on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join brands\n on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join organizations\n on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", 
"model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not IDs and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields are not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "e3fdf31f14e332d08049e6ad3a865a8a8776755ada75ddb655a6cc72a61b9d15"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", 
"columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.71707, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: 
https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, now()) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": "sha256", "checksum": "430c95ca8321909d770cb8caae56a0bdc90d91b889969ddcdfb4725b1bc5f903"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, 
"column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.72256, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the 
beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", 
"macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as 
ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_agent_work_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as 
(\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.743449, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - 
running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "postgres", 
"schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3d9208f477a6aa3dcf000568e9ca35d8edbdc8c7d47223f34bb1f1aa0f609902"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.747464, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "12c0706c03db8c187b66676360dc7ae36eb9db9b9c36324366854ec9ca03448d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.751113, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 
'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from week_index_calc\n cross join weeks\n 
where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt_date.week_start('sla_applied_at','UTC') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from 
intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n 
sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 
1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 
\n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": 
"6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.76655, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.770056, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_combined\"", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n 
total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and {{ 
dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n 
reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.7773728, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n 
select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": 
"model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": "int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "5562a77785bebf0f99e2d574f4b762ca5149c3c92245a7e35b345bf3ffb1cb00"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.781767, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n 
\"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. 
Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('valid_starting_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \" )\"\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", 
"macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - 
date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, 
valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n coalesce(schedule.start_time_utc, 0) as schedule_start_time, -- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n schedule.end_time_utc as schedule_end_time,\n coalesce(\n least(ticket_week_end_time_minute, schedule.end_time_utc)\n - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc),\n 0) as scheduled_minutes --- fill 0 for schedules completely outside schedule window. Only necessary for this field for use downstream.\n from weekly_period_requester_wait_time\n left join schedule -- using a left join to account for tickets started and completed entirely outside of a schedule, otherwise they are filtered out\n on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else 
false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp ) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "3dcdd6a267ee2ec704192d6e14b7af92ba52316f66389455c5bf3d0c73649188"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.7910972, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, 
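Both requester wait time models above detect breaches the same way: a windowed sum accumulates elapsed minutes per ticket and SLA, and the first interval where `target` minus the running total turns negative is flagged. A minimal, runnable sketch of that idiom follows (the rows and single-SLA-per-ticket shape are hypothetical simplifications; the real models also partition by `sla_applied_at` and spell out the `lag ... is null` guard):

```sql
-- Toy data (hypothetical): per-interval elapsed minutes against a 60-minute SLA.
with intervals(ticket_id, valid_starting_at, minutes, target) as (
    values
        (1, timestamp '2024-01-01 09:00', 30, 60),
        (1, timestamp '2024-01-01 10:00', 45, 60),  -- running total crosses 60 here
        (1, timestamp '2024-01-01 11:00', 15, 60)
),

running as (
    select
        *,
        sum(minutes) over (
            partition by ticket_id
            order by valid_starting_at
            rows between unbounded preceding and current row
        ) as running_total_minutes
    from intervals
)

select
    ticket_id,
    valid_starting_at,
    target - running_total_minutes as remaining_target_minutes,
    -- breached in this interval only if the remainder just turned negative
    case when target - running_total_minutes < 0
          and coalesce(lag(target - running_total_minutes) over (
                  partition by ticket_id order by valid_starting_at), 0) >= 0
         then true else false end as is_breached_during_schedule
from running;
```

The `coalesce(lag(...), 0)` collapses the package's `lag(...) >= 0 or lag(...) is null` guard into one expression; only the second row is flagged, and `remaining_target_minutes + minutes` then gives the offset used to compute `sla_breach_at`.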
\"\" ~ dbt.current_timestamp() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n now() + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times"], "alias": "int_zendesk__ticket_reply_times", "checksum": {"name": "sha256", "checksum": "6de1b30f99a9bbd078c823538ca0e87c5b57d33160f65c290ecd67765e8d4472"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.795562, "relation_name": null, "raw_code": "with ticket_public_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at 
end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n ({{ dbt.datediff(\n 'end_user_comment_created_at',\n 'agent_responded_at',\n 'second') }} / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n 
end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
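Unescaped from the manifest, the reply-time logic above reduces to two steps: classify each comment by who spoke immediately before it, then pair each net-new end-user comment with the earliest later agent comment. A self-contained Postgres sketch (toy data; `extract(epoch from ...)` stands in for the `dbt.datediff` macro, and the internal-first-comment edge cases are left out):

```sql
-- Hypothetical comment stream for one ticket.
with comments(ticket_id, valid_starting_at, commenter_role) as (
    values
        (1, timestamp '2024-01-01 09:00', 'external_comment'),
        (1, timestamp '2024-01-01 09:30', 'internal_comment'),
        (1, timestamp '2024-01-01 10:00', 'external_comment'),
        (1, timestamp '2024-01-01 11:00', 'internal_comment')
),

enriched as (
    select
        *,
        coalesce(
            lag(commenter_role) over (partition by ticket_id order by valid_starting_at),
            'first_comment'
        ) as previous_commenter_role
    from comments
),

end_user_comments as (
    -- only net-new end-user comments start (or restart) the reply clock
    select ticket_id, valid_starting_at as end_user_comment_created_at
    from enriched
    where commenter_role = 'external_comment'
      and previous_commenter_role != 'external_comment'
)

select
    e.ticket_id,
    e.end_user_comment_created_at,
    min(a.valid_starting_at) as agent_responded_at,
    extract(epoch from min(a.valid_starting_at) - e.end_user_comment_created_at) / 60
        as reply_time_calendar_minutes
from end_user_comments as e
left join enriched as a
    on a.ticket_id = e.ticket_id
   and a.commenter_role = 'internal_comment'
   and a.valid_starting_at > e.end_user_comment_created_at
group by 1, 2;
```

With the toy rows this yields two reply pairs (30 and 60 calendar minutes); summing the first gives `first_reply_time_calendar_minutes` and summing all of them gives `total_reply_time_calendar_minutes`, as in the calendar model that follows.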
"model.zendesk.int_zendesk__ticket_reply_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_reply_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times_calendar.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times_calendar"], "alias": "int_zendesk__ticket_reply_times_calendar", "checksum": {"name": "sha256", "checksum": "6fb6a60134019d78fcfc8c135b4a7887b3ce52ec53d8db463194f7824d2c71c2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.799264, "relation_name": null, "raw_code": "with ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_reply_times"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by 
valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join 
ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and 
agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comments_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comments_enriched", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__comments_enriched.sql", "original_file_path": "models/reply_times/int_zendesk__comments_enriched.sql", "unique_id": "model.zendesk.int_zendesk__comments_enriched", "fqn": ["zendesk", "reply_times", "int_zendesk__comments_enriched"], "alias": "int_zendesk__comments_enriched", "checksum": {"name": "sha256", "checksum": "970004a2aa343ae78a3f810828600c7eca8585428b52b05e4353f9debc6f1af5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.801632, "relation_name": null, "raw_code": "with ticket_comment as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'comment'\n\n), users as (\n\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id 
rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__comments_enriched.sql", "compiled": true, "compiled_code": "with ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_reply_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_reply_time_business", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_first_reply_time_business.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_first_reply_time_business"], "alias": "int_zendesk__ticket_first_reply_time_business", "checksum": {"name": "sha256", "checksum": "0bacc5f74a5eac2a55c2b0bacb1a0b7908783948ad162b84c230be9310dd02b5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, 
"on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.80294, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n), ticket_schedules as (\n\n select \n *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from 
weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), 
end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * 
(7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 
'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_enriched": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_enriched", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_enriched.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_enriched.sql", "unique_id": "model.zendesk.int_zendesk__field_history_enriched", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_enriched"], "alias": "int_zendesk__field_history_enriched", "checksum": {"name": "sha256", "checksum": "cdf920b1df5fee8c6a08b0e26996028d327964903e8acc4dd15498d23c00005c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.810275, "relation_name": null, "raw_code": "with ticket_field_history as (\n\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), updater_info as (\n select *\n from {{ ref('int_zendesk__updater_information') }}\n\n), final as (\n select\n ticket_field_history.*\n\n {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.\n {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.\n\n --The below statements are needed to populate Zendesk automated fields for when the zendesk triggers automatically change fields based on user defined triggers.\n {% if col in ['updater_is_active'] %}\n ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}\n\n {% elif col in ['updater_user_id','updater_organization_id'] %}\n ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}\n \n {% elif col in ['updater_last_login_at'] %}\n ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}\n \n {% else %}\n ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}\n \n {% endif %}\n {% endfor %}\n {% endif %} \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "int_zendesk__updater_information", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk.int_zendesk__updater_information"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_enriched.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n 
select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_pivot": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_pivot", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_pivot.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_pivot.sql", "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_pivot"], "alias": "int_zendesk__field_history_pivot", "checksum": {"name": "sha256", "checksum": "077bf8d76ba0523c2ebb987be0fd0746acbdae8fdbdd39fc7a03203a5d070f87"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728513415.8151019, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"", "raw_code": "-- depends_on: {{ source('zendesk', 'ticket_field_history') }}\n\n{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{% if execute -%}\n {% set results = run_query('select distinct field_name from ' ~ source('zendesk', 'ticket_field_history') ) %}\n {% set results_list = results.columns[0].values() %}\n{% endif -%}\n\nwith field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n ,\n {{ var('ticket_field_history_updater_columns') | join (\", \")}}\n\n {% endif %}\n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from {{ ref('int_zendesk__field_history_enriched') }}\n {% if is_incremental() %}\n where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day\n\n {% for col in results_list if col in var('ticket_field_history_columns') %}\n {% set col_xf = col|lower %}\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n\n {% for upd in var('ticket_field_history_updater_columns') %}\n\n {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.\n\n {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}\n\n ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% else %}\n\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% endif %}\n {% endfor %}\n {% endif %}\n {% endfor %}\n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n {{ dbt_utils.generate_surrogate_key(['ticket_id','date_day'])}} as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_enriched", "package": null, "version": null}], "sources": [["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.date_trunc", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history", "model.zendesk.int_zendesk__field_history_enriched"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_pivot.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n 
from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as 
updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__field_history_enriched", "sql": " __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updater_information": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updater_information", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__updater_information.sql", "original_file_path": "models/ticket_history/int_zendesk__updater_information.sql", "unique_id": "model.zendesk.int_zendesk__updater_information", "fqn": ["zendesk", "ticket_history", "int_zendesk__updater_information"], "alias": "int_zendesk__updater_information", "checksum": {"name": "sha256", "checksum": "62a690646cff991c0e0b6e205440a070bb44aab8d4d9286714710c52a4c6677a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.8238049, "relation_name": null, "raw_code": "with users as (\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), organizations as (\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,users.user_tags as updater_user_tags\n {% endif %}\n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,organizations.domain_names as updater_organization_domain_names\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,organizations.organization_tags as 
updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.828048, "relation_name": 
"\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_scd\"", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from 
set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "01739353b5d9fec39fe39ca428ceb43b51a64bd7408d1f4825fcf1d598fb15ca"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1728513415.8617852, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\"", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as (\n\n 
select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then now() else updated_at end) as date) as open_until\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), 
'_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.867981, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as 
on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "9ea4023c98c8bdebaf01445490e058d4766cb32a45db569e01e91fa8eac2e689"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.869216, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), 
ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time 
<= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n 
ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', 
(ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n 
and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__calendar_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "722fbe199f8263916801adf6a6f035c8dc37de056bbd359bd9c42f834b3f3ef3"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.876569, "relation_name": null, "raw_code": "-- depends_on: {{ var('ticket') }}\nwith spine as (\n\n {% if execute and flags.WHICH in ('run', 'build') %}\n\n {%- set first_date_query %}\n select \n coalesce(\n min(cast(created_at as date)), \n cast({{ dbt.dateadd(\"month\", -1, 
\"current_date\") }} as date)\n ) as min_date\n from {{ var('ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ dbt.dateadd('year', \n - var('ticket_field_history_timeframe_years', 50), \"current_date\") }}\n {% endset -%}\n\n {%- set first_date = dbt_utils.get_single_value(first_date_query) %}\n\n {% else %}\n {%- set first_date = '2016-01-01' %}\n\n {% endif %}\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = \"cast('\" ~ first_date ~ \"' as date)\",\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwith spine as (\n\n \n\n \n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n + \n \n p11.generated_number * power(2, 11)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n cross join \n \n p as p11\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 3211\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2016-01-01' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n select\n cast(date_day as date) as date_day\n from spine\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__timezone_daylight": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__timezone_daylight", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__timezone_daylight.sql", "original_file_path": "models/utils/int_zendesk__timezone_daylight.sql", "unique_id": "model.zendesk.int_zendesk__timezone_daylight", "fqn": ["zendesk", "utils", "int_zendesk__timezone_daylight"], "alias": "int_zendesk__timezone_daylight", "checksum": {"name": "sha256", "checksum": 
"021f733ee1abac848fb9d6cfff1c4981f24919f7ff0f59e9c2895654831d9dd8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.8893912, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. 
Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp() }} as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(valid_until as {{ dbt.type_timestamp() }}) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.dateadd", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__timezone_daylight.sql", "compiled": true, "compiled_code": "\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], "alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1728513415.894538, "relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is 
not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - 
date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "92b30d97de3fa5a059b70ef930d731bc7cfeb93a39206970f37ed605264c01af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.899159, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n 
from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', 
from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
(ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + 
date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule\n on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < 
cast(schedule.valid_until as date)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', 
(ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": "sha256", "checksum": "c14c73bcfcc33dc8bc6a94827770c47f4e70f4608f3227bbbc1f10cbcad4c572"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.906282, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id 
= ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), 
ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time 
as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n 
from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n 
ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk__document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/zendesk__document.sql", "original_file_path": "models/unstructured/zendesk__document.sql", "unique_id": "model.zendesk.zendesk__document", "fqn": ["zendesk", "unstructured", "zendesk__document"], "alias": "zendesk__document", "checksum": {"name": "sha256", "checksum": "0d3d8f2e10bcc679a958386cd5b13f616e17139821263f12c8dddef34c93b21b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": 
false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"document_id": {"name": "document_id", "description": "Equivalent to `ticket_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_index": {"name": "chunk_index", "description": "The index of the chunk associated with the `document_id`.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk_tokens_approximate": {"name": "chunk_tokens_approximate", "description": "Approximate number of tokens for the chunk, assuming 4 characters per token.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "chunk": {"name": "chunk", "description": "The text of the chunk.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/unstructured/zendesk_unstructured.yml", "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513416.498335, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_document as (\n select *\n from {{ ref('int_zendesk__ticket_document') }}\n\n), grouped as (\n select *\n from {{ ref('int_zendesk__ticket_comment_documents_grouped') }}\n\n), final as (\n select\n cast(ticket_document.ticket_id as {{ dbt.type_string() }}) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n {{ dbt.concat([\n \"ticket_document.ticket_markdown\",\n \"'\\\\n\\\\n## COMMENTS\\\\n\\\\n'\",\n \"grouped.comments_group_markdown\"]) }}\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__ticket_document", "package": null, "version": null}, {"name": "int_zendesk__ticket_comment_documents_grouped", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.concat"], "nodes": ["model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_comment_documents_grouped"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/zendesk__document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_document as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"\n\n), grouped as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"\n\n), final as (\n select\n cast(ticket_document.ticket_id as TEXT) as document_id,\n grouped.chunk_index,\n grouped.chunk_tokens as chunk_tokens_approximate,\n ticket_document.ticket_markdown || '\\n\\n## COMMENTS\\n\\n' || grouped.comments_group_markdown\n as chunk\n from ticket_document\n join grouped\n on grouped.ticket_id = ticket_document.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_documents_grouped": 
{"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_documents_grouped", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_documents_grouped", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_documents_grouped"], "alias": "int_zendesk__ticket_comment_documents_grouped", "checksum": {"name": "sha256", "checksum": "ad03266e19d20396ca75812cb98816f3e11e078c63c30807790903674f4db42b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.917578, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_documents_grouped\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith filtered_comment_documents as (\n select *\n from {{ ref('int_zendesk__ticket_comment_document') }}\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast({{ dbt_utils.safe_divide('floor(cumulative_length - 1)', var('zendesk_max_tokens', 5000)) }} as {{ dbt.type_int() }}) as chunk_index,\n {{ dbt.listagg(\n measure=\"comment_markdown\",\n delimiter_text=\"'\\\\n\\\\n---\\\\n\\\\n'\",\n order_by_clause=\"order by comment_time\"\n ) }} as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_comment_document", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.safe_divide", "macro.dbt.type_int", "macro.dbt.listagg"], "nodes": ["model.zendesk.int_zendesk__ticket_comment_document"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_documents_grouped.sql", "compiled": true, "compiled_code": "\n\nwith filtered_comment_documents as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"\n),\n\ngrouped_comment_documents as (\n select \n ticket_id,\n comment_markdown,\n comment_tokens,\n comment_time,\n sum(comment_tokens) over (\n partition by ticket_id \n order by comment_time\n rows between unbounded preceding and current row\n ) as cumulative_length\n from filtered_comment_documents\n)\n\nselect \n ticket_id,\n cast(\n ( 
floor(cumulative_length - 1) ) / nullif( ( 5000 ), 0)\n as integer) as chunk_index,\n \n string_agg(\n comment_markdown,\n '\\n\\n---\\n\\n'\n order by comment_time\n ) as comments_group_markdown,\n sum(comment_tokens) as chunk_tokens\nfrom grouped_comment_documents\ngroup by 1,2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_comment_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_comment_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_comment_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_comment_document"], "alias": "int_zendesk__ticket_comment_document", "checksum": {"name": "sha256", "checksum": "e75f893dec0ca7599db16793ad9b39bf5d33f463abe6fa4d7be8019e095f45d8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.925162, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_comment_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith ticket_comments as (\n select *\n from {{ var('ticket_comment') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_email,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n {{ dbt.concat([\n \"'### message from '\", \"commenter_name\", \"' ('\", \"commenter_email\", \"')\\\\n'\",\n \"'##### sent @ '\", \"comment_time\", \"'\\\\n'\",\n \"comment_body\"\n ]) }} as {{ dbt.type_string() }})\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n {{ zendesk.count_tokens(\"comment_markdown\") }} as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case 
when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then left(comment_markdown, {{ var('zendesk_max_tokens', 5000) }} * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > {{ var('zendesk_max_tokens', 5000) }} then {{ var('zendesk_max_tokens', 5000) }}\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_comment_document.sql", "compiled": true, "compiled_code": "\n\nwith ticket_comments as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), comment_details as (\n select \n ticket_comments.ticket_comment_id,\n ticket_comments.ticket_id,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_email,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as commenter_name,\n ticket_comments.created_at as comment_time,\n ticket_comments.body as comment_body\n from ticket_comments\n left join users\n on ticket_comments.user_id = users.user_id\n where not coalesce(ticket_comments._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), comment_markdowns as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n cast(\n '### message from ' || commenter_name || ' (' || commenter_email || ')\\n' || '##### sent @ ' || comment_time || '\\n' || comment_body as TEXT)\n as comment_markdown\n from comment_details\n\n), comments_tokens as (\n select\n *,\n \n \n\n length(\n comment_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as comment_tokens\n from comment_markdowns\n\n), truncated_comments as (\n select\n ticket_comment_id,\n ticket_id,\n comment_time,\n case when comment_tokens > 5000 then left(comment_markdown, 5000 * 4) -- approximate 4 characters per token\n else comment_markdown\n end as comment_markdown,\n case when comment_tokens > 5000 then 5000\n else comment_tokens\n end as comment_tokens\n from comments_tokens\n)\n\nselect *\nfrom truncated_comments", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_document": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_document", "resource_type": "model", "package_name": "zendesk", "path": "unstructured/intermediate/int_zendesk__ticket_document.sql", "original_file_path": "models/unstructured/intermediate/int_zendesk__ticket_document.sql", "unique_id": "model.zendesk.int_zendesk__ticket_document", "fqn": ["zendesk", "unstructured", "intermediate", "int_zendesk__ticket_document"], "alias": "int_zendesk__ticket_document", "checksum": {"name": "sha256", "checksum": 
"1fd6807d45c4904ff1ecbc4b929c675ae0b766b40a711641af85cfe4c6cae4ec"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.9361532, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_document\"", "raw_code": "{{ config(enabled=var('zendesk__unstructured_enabled', False)) }}\n\nwith tickets as (\n select *\n from {{ var('ticket') }}\n\n), users as (\n select *\n from {{ var('user') }}\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n {{ zendesk.coalesce_cast([\"users.name\", \"'UNKNOWN'\"], dbt.type_string()) }} as user_name,\n {{ zendesk.coalesce_cast([\"users.email\", \"'UNKNOWN'\"], dbt.type_string()) }} as created_by,\n tickets.created_at AS created_on,\n {{ zendesk.coalesce_cast([\"tickets.status\", \"'UNKNOWN'\"], dbt.type_string()) }} as status,\n {{ zendesk.coalesce_cast([\"tickets.priority\", \"'UNKNOWN'\"], dbt.type_string()) }} as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n {{ dbt.concat([\n \"'# Ticket : '\", \"ticket_name\", \"'\\\\n\\\\n'\",\n \"'Created By : '\", \"user_name\", \"' ('\", \"created_by\", \"')\\\\n'\",\n \"'Created On : '\", \"created_on\", \"'\\\\n'\",\n \"'Status : '\", \"status\", \"'\\\\n'\",\n \"'Priority : '\", \"priority\"\n ]) }} as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n {{ zendesk.count_tokens(\"ticket_markdown\") }} as ticket_tokens\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.zendesk.coalesce_cast", "macro.dbt.concat", "macro.zendesk.count_tokens"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/unstructured/intermediate/int_zendesk__ticket_document.sql", "compiled": true, "compiled_code": "\n\nwith tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), ticket_details as (\n select\n tickets.ticket_id,\n tickets.subject AS ticket_name,\n \n coalesce(\n cast(users.name as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as user_name,\n \n coalesce(\n cast(users.email as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as created_by,\n tickets.created_at AS created_on,\n \n coalesce(\n cast(tickets.status as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as status,\n \n coalesce(\n 
cast(tickets.priority as TEXT),\n cast('UNKNOWN' as TEXT)\n )\n as priority\n from tickets\n left join users\n on tickets.requester_id = users.user_id\n where not coalesce(tickets._fivetran_deleted, False)\n and not coalesce(users._fivetran_deleted, False)\n\n), final as (\n select\n ticket_id,\n '# Ticket : ' || ticket_name || '\\n\\n' || 'Created By : ' || user_name || ' (' || created_by || ')\\n' || 'Created On : ' || created_on || '\\n' || 'Status : ' || status || '\\n' || 'Priority : ' || priority as ticket_markdown\n from ticket_details\n)\n\nselect \n *,\n \n \n\n length(\n ticket_markdown\n ) / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n as ticket_tokens\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", "unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.942361, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", 
"package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.945837, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order 
by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - 
(coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", "checksum": {"name": "sha256", "checksum": "c3d207d8a59844953cd5d01532d3e023d7441025158cc2385fc3fa1441e34c13"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.950134, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_status\"", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO 
DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, now()))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, now()))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, now()))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__user_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.9537878, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__user_aggregates\"", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if 
var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "486d65e0e9e9c232524a9a11ee68202dcddb67a41bb6c4bb17ca656b6965688e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", 
"materialized": "table", "enabled": true}, "created_at": 1728513415.958787, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_spine\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n the `valid_until` field. In contrast, schedule history and timezone `valid_until` values mark the end of \n validity at the start of the specified day.\n*/\n\nwith schedule_timezones as (\n select *\n from {{ ref('int_zendesk__schedule_timezones') }} \n\n{% if var('using_holidays', True) %}\n), schedule_holidays as (\n select *\n from {{ ref('int_zendesk__schedule_holiday') }} \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n select \n schedule_timezones.schedule_id,\n schedule_timezones.time_zone,\n schedule_timezones.offset_minutes,\n schedule_timezones.start_time_utc,\n schedule_timezones.end_time_utc,\n schedule_timezones.schedule_name,\n schedule_timezones.schedule_valid_from,\n schedule_timezones.schedule_valid_until,\n schedule_timezones.schedule_starting_sunday,\n schedule_timezones.schedule_ending_sunday,\n schedule_timezones.change_type,\n schedule_holidays.holiday_date,\n schedule_holidays.holiday_name,\n schedule_holidays.holiday_valid_from,\n schedule_holidays.holiday_valid_until,\n schedule_holidays.holiday_starting_sunday,\n schedule_holidays.holiday_ending_sunday,\n schedule_holidays.holiday_start_or_end\n from schedule_timezones\n left join schedule_holidays\n on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n select\n join_holidays.*,\n row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n from join_holidays\n\n-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n case when valid_from_index = 1 and holiday_start_or_end is not null\n then 'partition_start'\n else holiday_start_or_end\n end as holiday_start_or_end,\n valid_from_index,\n max_valid_from_index\n from valid_from_partition\n \n union all\n\n -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n 
end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n 'partition_end' as holiday_start_or_end,\n max_valid_from_index + 1 as valid_from_index,\n max_valid_from_index\n from valid_from_partition\n where max_valid_from_index > 1\n and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the partition start, gap, or holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_from', 'minute') }}\n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }}\n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as {{ 
dbt.type_string() }}) \n end as holiday_name,\n count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n from valid_minutes\n\n-- Filter out records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n select \n *,\n cast(1 as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n from find_holidays\n where number_holidays_in_week = 1\n\n union all\n\n -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n select \n distinct *,\n cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n as {{ dbt.type_int() }}) as number_records_for_schedule_start_end\n from find_holidays\n where number_holidays_in_week > 1\n\n), final as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type\n from filter_holidays\n\n -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n -- of distinct schedule records with the same start_time_utc and end_time_utc.\n -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n where number_holidays_in_week = number_records_for_schedule_start_end\n and holiday_name is null\n\n{% else %} \n), final as(\n select \n schedule_id,\n schedule_valid_from as valid_from,\n schedule_valid_until as valid_until,\n start_time_utc,\n end_time_utc,\n change_type\n from schedule_timezones\n{% endif %} \n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__schedule_timezones", "package": null, "version": null}, {"name": "int_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.type_string", "macro.dbt.type_int"], "nodes": ["model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n This model generates `valid_from` and `valid_until` timestamps for each schedule start_time and stop_time, \n accounting for timezone changes, holidays, and historical schedule adjustments. The inclusion of holidays \n and historical changes is controlled by variables `using_holidays` and `using_schedule_histories`.\n\n !!! Important distinction for holiday ranges: A holiday remains valid through the entire day specified by \n the `valid_until` field. 
In contrast, schedule history and timezone `valid_until` values mark the end of \n validity at the start of the specified day.\n*/\n\nwith __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__schedule_timezones as (\n\n\nwith split_timezones as (\n select *\n from __dbt__cte__int_zendesk__timezone_daylight \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n\n), schedule_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\" \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. 
Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast(\n\n now() + ((interval '1 day') * (7))\n\n as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as timestamp) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n group by 1,2,3,4,5,6\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule 
coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as timestamp) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as timestamp) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast(date_trunc('day', split_timezones.valid_from) as timestamp) as timezone_valid_from,\n cast(date_trunc('day', split_timezones.valid_until) as timestamp) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. \n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. 
\n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_from + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_until + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__schedule_holiday as (\n\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (i.e. standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n), schedule_holiday as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast(date_trunc('day', holiday_start_date_at) as timestamp) as holiday_valid_from,\n cast(date_trunc('day', holiday_end_date_at) as timestamp) as holiday_valid_until,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n \n\n holiday_end_date_at + ((interval '1 week') * (1))\n\n + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. First step is to find those holidays.\n \n (\n ((holiday_end_date_at)::date - (holiday_start_date_at)::date)\n / 7 + case\n when date_part('dow', (holiday_start_date_at)::timestamp) <= date_part('dow', (holiday_end_date_at)::timestamp) then\n case when holiday_start_date_at <= holiday_end_date_at then 0 else -1 end\n else\n case when holiday_start_date_at <= holiday_end_date_at then 1 else 0 end\n end)\n + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. 
Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as integer) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join (\n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n \n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n + ((interval '1 day') * (-1))\n\n as timestamp) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n as timestamp)\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each of the holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another 
record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays\n), schedule_timezones as (\n select *\n from __dbt__cte__int_zendesk__schedule_timezones \n\n\n), schedule_holidays as (\n select *\n from __dbt__cte__int_zendesk__schedule_holiday \n\n-- Joins the schedules with holidays, ensuring holidays fall within the valid schedule period.\n-- If there are no holidays, the columns are filled with null values.\n), join_holidays as (\n select \n schedule_timezones.schedule_id,\n schedule_timezones.time_zone,\n schedule_timezones.offset_minutes,\n schedule_timezones.start_time_utc,\n schedule_timezones.end_time_utc,\n schedule_timezones.schedule_name,\n schedule_timezones.schedule_valid_from,\n schedule_timezones.schedule_valid_until,\n schedule_timezones.schedule_starting_sunday,\n schedule_timezones.schedule_ending_sunday,\n schedule_timezones.change_type,\n schedule_holidays.holiday_date,\n schedule_holidays.holiday_name,\n schedule_holidays.holiday_valid_from,\n schedule_holidays.holiday_valid_until,\n schedule_holidays.holiday_starting_sunday,\n schedule_holidays.holiday_ending_sunday,\n schedule_holidays.holiday_start_or_end\n from schedule_timezones\n left join schedule_holidays\n on schedule_holidays.schedule_id = schedule_timezones.schedule_id\n and schedule_holidays.holiday_date >= schedule_timezones.schedule_valid_from\n and schedule_holidays.holiday_date < schedule_timezones.schedule_valid_until\n\n-- Find and count all holidays that fall within a schedule range.\n), valid_from_partition as(\n select\n join_holidays.*,\n row_number() over (partition by schedule_id, start_time_utc, schedule_valid_from order by holiday_date, holiday_start_or_end) as valid_from_index,\n count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index\n from join_holidays\n\n-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.\n), add_partition_end_row as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n case when valid_from_index = 1 and holiday_start_or_end is not null\n then 'partition_start'\n else holiday_start_or_end\n end as holiday_start_or_end,\n valid_from_index,\n max_valid_from_index\n from valid_from_partition\n \n union all\n\n -- when max_valid_from_index > 1, then we want to duplicate the last row to end the partition.\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n schedule_starting_sunday,\n schedule_ending_sunday,\n change_type,\n holiday_name,\n holiday_date,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n 'partition_end' as holiday_start_or_end,\n max_valid_from_index + 1 as valid_from_index,\n max_valid_from_index\n from valid_from_partition\n where max_valid_from_index > 1\n and valid_from_index = max_valid_from_index -- this finds the last rows to duplicate\n\n-- Adjusts and fills the valid from and valid until times for each partition, taking into account the 
partition start, gap, or holiday.\n), adjust_ranges as(\n select\n add_partition_end_row.*,\n case\n when holiday_start_or_end = 'partition_start'\n then schedule_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lag(holiday_ending_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_starting_sunday\n when holiday_start_or_end = 'partition_end'\n then holiday_ending_sunday\n else schedule_starting_sunday\n end as valid_from,\n case \n when holiday_start_or_end = 'partition_start'\n then holiday_starting_sunday\n when holiday_start_or_end = '0_gap'\n then lead(holiday_starting_sunday) over (partition by schedule_id, start_time_utc, schedule_valid_from order by valid_from_index)\n when holiday_start_or_end = '1_holiday'\n then holiday_ending_sunday\n when holiday_start_or_end = 'partition_end'\n then schedule_ending_sunday\n else schedule_ending_sunday\n end as valid_until\n from add_partition_end_row\n\n), holiday_weeks as(\n select\n schedule_id,\n time_zone,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_name,\n valid_from,\n valid_until,\n holiday_name,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_start_or_end,\n valid_from_index,\n case when holiday_start_or_end = '1_holiday'\n then 'holiday'\n else change_type\n end as change_type\n from adjust_ranges\n -- filter out irrelevant records after adjusting the ranges\n where not (valid_from >= valid_until and holiday_date is not null)\n\n-- Converts holiday valid_from and valid_until times into minutes from the start of the week, adjusting for timezones.\n), valid_minutes as(\n select\n holiday_weeks.*,\n\n -- Calculate holiday_valid_from in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_from)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_from)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_from)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n - offset_minutes) -- timezone adjustment\n else null\n end as holiday_valid_from_minutes_from_week_start,\n\n -- Calculate holiday_valid_until in minutes from week start\n case when change_type = 'holiday' \n then (\n (\n (\n ((holiday_valid_until)::date - (holiday_starting_sunday)::date)\n * 24 + date_part('hour', (holiday_valid_until)::timestamp) - date_part('hour', (holiday_starting_sunday)::timestamp))\n * 60 + date_part('minute', (holiday_valid_until)::timestamp) - date_part('minute', (holiday_starting_sunday)::timestamp))\n \n + 24 * 60 -- add 1 day to set the upper bound of the holiday\n - offset_minutes)-- timezone adjustment\n else null\n end as holiday_valid_until_minutes_from_week_start\n from holiday_weeks\n\n-- Identifies whether a schedule overlaps with a holiday by comparing start and end times with holiday minutes.\n), find_holidays as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type,\n case \n when start_time_utc < holiday_valid_until_minutes_from_week_start\n and end_time_utc > holiday_valid_from_minutes_from_week_start\n and change_type = 'holiday' \n then holiday_name\n else cast(null as TEXT) \n end as holiday_name,\n count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc) as number_holidays_in_week\n from 
valid_minutes\n\n-- Filter out records where holiday overlaps don't match, ensuring each schedule's holiday status is consistent.\n), filter_holidays as(\n select \n *,\n cast(1 as integer) as number_records_for_schedule_start_end\n from find_holidays\n where number_holidays_in_week = 1\n\n union all\n\n -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.\n select \n distinct *,\n cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name) \n as integer) as number_records_for_schedule_start_end\n from find_holidays\n where number_holidays_in_week > 1\n\n), final as(\n select \n schedule_id,\n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n change_type\n from filter_holidays\n\n -- This filter ensures that for each schedule, the count of holidays in a week matches the number \n -- of distinct schedule records with the same start_time_utc and end_time_utc.\n -- Rows where this count doesn't match indicate overlap with a holiday, so we filter out that record.\n -- Additionally, schedule records that fall on a holiday are excluded by checking if holiday_name is null.\n where number_holidays_in_week = number_records_for_schedule_start_end\n and holiday_name is null\n\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__timezone_daylight", "sql": " __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... 
the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__schedule_timezones", "sql": " __dbt__cte__int_zendesk__schedule_timezones as (\n\n\nwith split_timezones as (\n select *\n from __dbt__cte__int_zendesk__timezone_daylight \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n\n), schedule_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\" \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. 
Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast(\n\n now() + ((interval '1 day') * (7))\n\n as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as timestamp) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n 
over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n group by 1,2,3,4,5,6\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as timestamp) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as timestamp) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast(date_trunc('day', split_timezones.valid_from) as timestamp) as timezone_valid_from,\n cast(date_trunc('day', split_timezones.valid_until) as timestamp) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_from + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_until + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__schedule_holiday", "sql": " __dbt__cte__int_zendesk__schedule_holiday as (\n\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. 
End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (i.e. standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n), schedule_holiday as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast(date_trunc('day', holiday_start_date_at) as timestamp) as holiday_valid_from,\n cast(date_trunc('day', holiday_end_date_at) as timestamp) as holiday_valid_until,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n \n\n holiday_end_date_at + ((interval '1 week') * (1))\n\n + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up into weeks. First step is to find those holidays.\n \n (\n ((holiday_end_date_at)::date - (holiday_start_date_at)::date)\n / 7 + case\n when date_part('dow', (holiday_start_date_at)::timestamp) <= date_part('dow', (holiday_end_date_at)::timestamp) then\n case when holiday_start_date_at <= holiday_end_date_at then 0 else -1 end\n else\n case when holiday_start_date_at <= holiday_end_date_at then 1 else 0 end\n end)\n + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. 
Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as integer) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join (\n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n \n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n + ((interval '1 day') * (-1))\n\n as timestamp) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n as timestamp)\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each of the holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another 
record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "30511daddcbbf831fc42f7e5039fad1c76a43499f3c208e1b982ab895dfa7d44"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.964258, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_schedules\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n \n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n \n select *\n from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n \n select *\n from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and 
first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n now() + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": 
"intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.970064, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__assignee_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", 
"original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.9713888, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__comment_metrics\"", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In 
int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at 
desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_timezones": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_timezones", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_timezones.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_timezones.sql", "unique_id": "model.zendesk.int_zendesk__schedule_timezones", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_timezones"], "alias": "int_zendesk__schedule_timezones", "checksum": {"name": "sha256", "checksum": "b713ed86f1e7f97cc9836d2944f5449c827effc898eda4184123e49d61a4241d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.972644, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith split_timezones as (\n select *\n from {{ ref('int_zendesk__timezone_daylight') }} \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from {{ var('schedule') }} \n\n{% if var('using_schedule_histories', True) %}\n), schedule_history as (\n select *\n from {{ ref('int_zendesk__schedule_history') }} \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. 
Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast({{ dbt.dateadd('day', 7, dbt.current_timestamp()) }} as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent 
to the previous row\n over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n {{ dbt_utils.group_by(6) }}\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n{% else %} -- when not using schedule histories\n), final_schedule as (\n select \n schedule.schedule_id,\n 0 as schedule_id_index,\n lower(schedule.time_zone) as time_zone,\n schedule.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as schedule_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as schedule_valid_until,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_from') }} as {{ dbt.type_timestamp() }}) as timezone_valid_from,\n cast({{ dbt.date_trunc('day', 'split_timezones.valid_until') }} as {{ dbt.type_timestamp() }}) as timezone_valid_until\n from schedule\n left join split_timezones\n on split_timezones.time_zone = lower(schedule.time_zone)\n{% endif %}\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast({{ dbt_date.week_start('schedule_valid_from','UTC') }} as {{ dbt.type_timestamp() 
}}) as schedule_starting_sunday,\n cast({{ dbt_date.week_start('schedule_valid_until','UTC') }} as {{ dbt.type_timestamp() }}) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__timezone_daylight", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "int_zendesk__schedule_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp", "macro.dbt.dateadd", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt.date_trunc", "macro.dbt_date.week_start"], "nodes": ["model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk.int_zendesk__schedule_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_timezones.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with 1970-01-01
\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will set the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n), split_timezones as (\n select *\n from __dbt__cte__int_zendesk__timezone_daylight \n\n), schedule as (\n select \n *,\n max(created_at) over (partition by schedule_id) as max_created_at\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n\n), schedule_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\" \n\n-- Select the most recent timezone associated with each schedule based on \n-- the max_created_at timestamp. Historical timezone changes are not yet tracked.\n), schedule_id_timezone as (\n select\n distinct schedule_id,\n lower(time_zone) as time_zone,\n schedule_name\n from schedule\n where created_at = max_created_at\n\n-- Combine historical schedules with the most recent timezone data. 
Filter \n-- out records where the timezone is missing, indicating the schedule has \n-- been deleted.\n), schedule_history_timezones as (\n select\n schedule_history.schedule_id,\n schedule_history.schedule_id_index,\n schedule_history.start_time,\n schedule_history.end_time,\n schedule_history.valid_from,\n schedule_history.valid_until,\n lower(schedule_id_timezone.time_zone) as time_zone,\n schedule_id_timezone.schedule_name\n from schedule_history\n left join schedule_id_timezone\n on schedule_id_timezone.schedule_id = schedule_history.schedule_id\n -- We have to filter these records out since time math requires timezone\n -- revisit later if this becomes a bigger issue\n where time_zone is not null\n\n-- Combine current schedules with historical schedules. Adjust the valid_from and valid_until dates accordingly.\n), union_schedule_histories as (\n select\n schedule_id,\n 0 as schedule_id_index, -- set the index as 0 for the current schedule\n created_at,\n start_time,\n end_time,\n lower(time_zone) as time_zone,\n schedule_name,\n cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later\n cast(\n\n now() + ((interval '1 day') * (7))\n\n as date) as valid_until,\n False as is_historical\n from schedule\n\n union all\n\n select\n schedule_id,\n schedule_id_index,\n cast(null as timestamp) as created_at,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n cast(valid_from as date) as valid_from,\n cast(valid_until as date) as valid_until,\n True as is_historical\n from schedule_history_timezones\n\n-- Set the schedule_valid_from for current schedules based on the most recent historical row.\n-- This allows the current schedule to pick up where the historical schedule left off.\n), fill_current_schedule as (\n select\n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n coalesce(case\n when schedule_id_index = 0\n -- get max valid_until from historical rows in the same schedule\n then max(case when schedule_id_index > 0 then valid_until end) \n over (partition by schedule_id)\n else valid_from\n end,\n cast(created_at as date))\n as schedule_valid_from,\n valid_until as schedule_valid_until\n from union_schedule_histories\n\n-- Detect adjacent time periods by lagging the schedule_valid_until value \n-- to identify effectively unchanged schedules.\n), lag_valid_until as (\n select \n fill_current_schedule.*,\n lag(schedule_valid_until) over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from, schedule_valid_until) as previous_valid_until\n from fill_current_schedule\n\n-- Identify distinct schedule groupings based on schedule_id, start_time, and end_time.\n-- Consolidate only adjacent schedules; if a schedule changes and later reverts to its original time, \n-- we want to maintain the intermediate schedule change.\n), find_actual_changes as (\n select \n schedule_id,\n schedule_id_index,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n schedule_valid_from,\n schedule_valid_until,\n\n -- The group_id increments only when there is a gap between the previous schedule's \n -- valid_until and the current schedule's valid_from, signaling the schedules are not adjacent.\n -- Adjacent schedules with the same start_time and end_time are grouped together, \n -- while non-adjacent schedules are treated as separate groups.\n sum(case when previous_valid_until = schedule_valid_from then 0 else 1 end) -- find if this row is adjacent to the previous row\n 
over (partition by schedule_id, start_time, end_time \n order by schedule_valid_from\n rows between unbounded preceding and current row)\n as group_id\n from lag_valid_until\n\n-- Consolidate records into continuous periods by finding the minimum \n-- valid_from and maximum valid_until for each group.\n), consolidate_changes as (\n select \n schedule_id,\n start_time,\n end_time,\n time_zone,\n schedule_name,\n group_id,\n min(schedule_id_index) as schedule_id_index, --helps with tracking downstream.\n min(schedule_valid_from) as schedule_valid_from,\n max(schedule_valid_until) as schedule_valid_until\n from find_actual_changes\n group by 1,2,3,4,5,6\n\n-- For each schedule_id, reset the earliest schedule_valid_from date to 1970-01-01 for full schedule coverage.\n), reset_schedule_start as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n start_time,\n end_time,\n case \n when schedule_valid_from = min(schedule_valid_from) over (partition by schedule_id) then '1970-01-01'\n else schedule_valid_from\n end as schedule_valid_from,\n schedule_valid_until\n from consolidate_changes\n\n-- Adjust the schedule times to UTC by applying the timezone offset. Join all possible\n-- time_zone matches for each schedule. The erroneous timezones will be filtered next.\n), schedule_timezones as (\n select \n reset_schedule_start.schedule_id,\n reset_schedule_start.schedule_id_index,\n reset_schedule_start.time_zone,\n reset_schedule_start.schedule_name,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes,\n reset_schedule_start.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n reset_schedule_start.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n cast(reset_schedule_start.schedule_valid_from as timestamp) as schedule_valid_from,\n cast(reset_schedule_start.schedule_valid_until as timestamp) as schedule_valid_until,\n -- we'll use these to determine which schedule version to associate tickets with.\n cast(date_trunc('day', split_timezones.valid_from) as timestamp) as timezone_valid_from,\n cast(date_trunc('day', split_timezones.valid_until) as timestamp) as timezone_valid_until\n from reset_schedule_start\n left join split_timezones\n on split_timezones.time_zone = reset_schedule_start.time_zone\n\n-- Assemble the final schedule-timezone relationship by determining the correct \n-- schedule_valid_from and schedule_valid_until based on overlapping periods \n-- between the schedule and timezone. 
\n), final_schedule as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n timezone_valid_from,\n timezone_valid_until,\n -- Be very careful if changing the order of these case whens--it does matter!\n case\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then schedule_valid_from\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then timezone_valid_from\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_from\n end as schedule_valid_from,\n case\n -- timezone that a schedule end falls within\n when schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until\n then schedule_valid_until\n -- timezone that a schedule start falls within\n when schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until\n then timezone_valid_until\n -- timezones that fall completely within the bounds of the schedule\n when timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until\n then timezone_valid_until\n end as schedule_valid_until\n\n from schedule_timezones\n\n -- Filter records based on whether the schedule periods overlap with timezone periods. Capture\n -- when a schedule start or end falls within a time zone, and also capture timezones that exist\n -- entirely within the bounds of a schedule. \n -- timezone that a schedule start falls within\n where (schedule_valid_from >= timezone_valid_from and schedule_valid_from < timezone_valid_until)\n -- timezone that a schedule end falls within\n or (schedule_valid_until >= timezone_valid_from and schedule_valid_until < timezone_valid_until)\n -- timezones that fall completely within the bounds of the schedule\n or (timezone_valid_from >= schedule_valid_from and timezone_valid_until < schedule_valid_until)\n\n\n\n), final as (\n select\n schedule_id,\n schedule_id_index,\n time_zone,\n schedule_name,\n offset_minutes,\n start_time_utc,\n end_time_utc,\n schedule_valid_from,\n schedule_valid_until,\n -- use dbt_date.week_start to ensure we truncate to Sunday\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_from + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_valid_until + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as schedule_ending_sunday,\n -- Check if the start of the schedule was from a schedule or timezone change for tracking downstream.\n case when schedule_valid_from = timezone_valid_from\n then 'timezone'\n else 'schedule'\n end as change_type\n from final_schedule\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__timezone_daylight", "sql": " __dbt__cte__int_zendesk__timezone_daylight as (\n\n\nwith timezone as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), timezone_with_dt as (\n\n select \n 
timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with 1970-01-01\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n now() + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will set the valid_until in the future.\n cast( \n\n now() + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(now() as date)\n\n), final as (\n select\n lower(time_zone) as time_zone,\n offset_minutes,\n cast(valid_from as timestamp) as valid_from,\n cast(valid_until as timestamp) as valid_until\n from split_timezones\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513415.9810402, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_group\"", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.int_zendesk__schedule_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_history", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_history.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_history.sql", "unique_id": "model.zendesk.int_zendesk__schedule_history", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_history"], "alias": "int_zendesk__schedule_history", "checksum": {"name": "sha256", "checksum": "50ea4332bcb87c82939479cc52aaae109243373492ef283e2d0138c21725f224"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513415.982283, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__schedule_history\"", "raw_code": "{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', False)) }}\n\nwith audit_logs as (\n select\n cast(source_id as {{ dbt.type_string() }}) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from {{ var('audit_log') }}\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 1) }} as schedule_change_from,\n {{ dbt.split_part('change_description_cleaned', \"' to '\", 2) }} as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- 
multiple changes can occur on one day, so we will keep only the latest change in a day.\n), consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n {% set days_of_week = {'sun': 0, 'mon': 1, 'tue': 2, 'wed': 3, 'thu': 4, 'fri': 5, 'sat': 6} %}\n {% for day, day_number in days_of_week.items() %}\n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n '{{ day }}' as day_of_week,\n cast('{{ day_number }}' as {{ dbt.type_int() }}) as day_of_week_number,\n {{ zendesk.regex_extract('schedule_change', day) }} as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n {% if not loop.last %}union all{% endif %}\n {% endfor %}\n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n{% if target.type == 'redshift' %}\n-- using PartiQL syntax to work with redshift's SUPER types, which requires an extra CTE\n), redshift_parse_schedule as (\n -- Redshift requires another CTE for unnesting \n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n day_of_week_schedule,\n json_parse('[' || replace(replace(day_of_week_schedule, ', ', ','), ',', '},{') || ']') as json_schedule\n\n from split_days\n where day_of_week_schedule != '{}' -- exclude when the day_of_week_schedule is empty. 
\n\n), unnested_schedules as (\n select \n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n day_of_week,\n day_of_week_number,\n -- go back to strings\n cast(day_of_week_schedule as {{ dbt.type_string() }}) as day_of_week_schedule,\n {{ clean_schedule('JSON_SERIALIZE(unnested_schedule)') }} as cleaned_unnested_schedule\n \n from redshift_parse_schedule as schedules, schedules.json_schedule as unnested_schedule\n\n{% else %}\n), unnested_schedules as (\n select\n split_days.*,\n\n {%- if target.type == 'bigquery' %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n cross join unnest(json_extract_array('[' || replace(day_of_week_schedule, ',', '},{') || ']', '$')) as unnested_schedule\n\n {%- elif target.type == 'snowflake' %}\n unnested_schedule.key || ':' || unnested_schedule.value as cleaned_unnested_schedule\n from split_days\n cross join lateral flatten(input => parse_json(replace(replace(day_of_week_schedule, '\\}\\}', '\\}'), '\\{\\{', '\\{'))) as unnested_schedule\n\n {%- elif target.type == 'postgres' %}\n {{ clean_schedule('unnested_schedule::text') }} as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n {%- elif target.type in ('databricks', 'spark') %}\n {{ clean_schedule('unnested_schedule') }} as cleaned_unnested_schedule\n from split_days\n lateral view explode(from_json(concat('[', replace(day_of_week_schedule, ',', '},{'), ']'), 'array')) as unnested_schedule\n\n {% else %}\n cast(null as {{ dbt.type_string() }}) as cleaned_unnested_schedule\n from split_days\n {%- endif %}\n\n{% endif %}\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
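The Postgres branch above turns one day's `{start: end, start: end}` string into a JSON array by rewriting `,` as `},{` and wrapping the result in brackets, then flattens it with `jsonb_array_elements`. A minimal standalone sketch, assuming Postgres; the two-window literal is illustrative, and the cleaning expression mirrors the compiled form of the `clean_schedule` macro:

```sql
-- Minimal Postgres sketch: one day with two working windows becomes two rows.
with split_days as (
    select '{"09:00": "17:00", "18:00": "20:00"}'::text as day_of_week_schedule
)
select
    -- strip braces, quotes, and spaces, as the compiled clean_schedule macro does
    replace(replace(replace(replace(unnested_schedule::text, '{', ''), '}', ''), '"', ''), ' ', '') as cleaned_unnested_schedule
from split_days
cross join lateral jsonb_array_elements(
    ('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb
) as unnested_schedule;
-- returns: 09:00:17:00 and 18:00:20:00
```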
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 1) }}, ' ') as {{ dbt.type_int() }}) as start_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 2) }}, ' ') as {{ dbt.type_int() }}) as start_time_mm, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 3) }}, ' ') as {{ dbt.type_int() }}) as end_time_hh, \n cast(nullif({{ dbt.split_part('cleaned_unnested_schedule', \"':'\", 4) }}, ' ') as {{ dbt.type_int() }}) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.split_part", "macro.dbt.type_int", "macro.zendesk.regex_extract", "macro.zendesk.clean_schedule"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_history.sql", "compiled": true, "compiled_code": "\n\nwith audit_logs as (\n select\n cast(source_id as TEXT) as schedule_id,\n created_at,\n lower(change_description) as change_description\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"\n where lower(change_description) like '%workweek changed from%'\n\n-- the formats for change_description vary, so it needs to be cleaned\n), audit_logs_enhanced as (\n select \n schedule_id,\n rank() over (partition by schedule_id order by created_at desc) as schedule_id_index,\n created_at,\n -- Clean up the change_description, sometimes has random html stuff in it\n replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(change_description,\n 'workweek changed from', ''), \n '"', '\"'), \n 'amp;', ''), \n '=>', ':'), ':mon:', '\"mon\":'), ':tue:', '\"tue\":'), ':wed:', '\"wed\":'), ':thu:', '\"thu\":'), ':fri:', '\"fri\":'), ':sat:', '\"sat\":'), ':sun:', '\"sun\":')\n as change_description_cleaned\n from audit_logs\n\n), split_to_from as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n cast(created_at as date) as valid_from,\n -- each change_description has two parts: 1-from the old schedule 2-to the new schedule.\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 1\n )\n\n\n \n\n as schedule_change_from,\n \n\n \n \n\n split_part(\n change_description_cleaned,\n ' to ',\n 2\n )\n\n\n \n\n as schedule_change\n from audit_logs_enhanced\n\n), find_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n schedule_change_from,\n schedule_change,\n row_number() over (\n partition by schedule_id, valid_from -- valid from is type date\n -- ordering to get the latest change when there are multiple on one day\n order by schedule_id_index, schedule_change_from -- use the length of schedule_change_from to tie break, which will deprioritize empty \"from\" schedules\n ) as row_number\n from split_to_from\n\n-- multiple changes can occur on one day, so we will keep only the latest change in a day.\n), 
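The `consolidate_same_day_changes` CTE that follows derives each change's `valid_until` by reading the next change's `valid_from` with `lead()`. Because `schedule_id_index` ranks the newest change as 1, ordering by the index descending walks forward in time. A minimal standalone sketch of that pattern, assuming Postgres; the sample rows are hypothetical:

```sql
-- Minimal Postgres sketch of the lead()-based validity windows.
with changes as (
    select * from (values
        ('sched_1', 3, date '2024-01-01'),
        ('sched_1', 2, date '2024-02-10'),
        ('sched_1', 1, date '2024-03-05')
    ) as t (schedule_id, schedule_id_index, valid_from)
)
select
    schedule_id,
    valid_from,
    -- descending index = ascending time; the newest row gets a null valid_until
    lead(valid_from) over (partition by schedule_id order by schedule_id_index desc) as valid_until
from changes;
-- 2024-01-01 -> 2024-02-10, 2024-02-10 -> 2024-03-05, 2024-03-05 -> null (current schedule)
```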
consolidate_same_day_changes as (\n select\n schedule_id,\n schedule_id_index,\n created_at,\n valid_from,\n lead(valid_from) over (\n partition by schedule_id order by schedule_id_index desc) as valid_until,\n schedule_change\n from find_same_day_changes\n where row_number = 1\n\n-- Creates a record for each day of the week for each schedule_change event.\n-- This is done by iterating over the days of the week, extracting the corresponding \n-- schedule data for each day, and unioning the results after each iteration.\n), split_days as (\n \n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sun' as day_of_week,\n cast('0' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sun.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'mon' as day_of_week,\n cast('1' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?mon.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'tue' as day_of_week,\n cast('2' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?tue.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'wed' as day_of_week,\n cast('3' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?wed.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'thu' as day_of_week,\n cast('4' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?thu.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. 
\n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'fri' as day_of_week,\n cast('5' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?fri.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n union all\n \n select\n schedule_id,\n schedule_id_index,\n valid_from,\n valid_until,\n schedule_change,\n 'sat' as day_of_week,\n cast('6' as integer) as day_of_week_number,\n \n \n\n (regexp_matches(schedule_change, '.*?sat.*?({.*?})'))[1] as day_of_week_schedule -- Extracts the schedule data specific to the current day from the schedule_change field.\n from consolidate_same_day_changes\n -- Exclude records with a null valid_until, which indicates it is the current schedule. \n -- We will to pull in the live schedule downstream, which is necessary when not using schedule histories.\n where valid_until is not null\n\n \n \n\n-- A single day may contain multiple start and stop times, so we need to generate a separate record for each.\n-- The day_of_week_schedule is structured like a JSON string, requiring warehouse-specific logic to flatten it into individual records.\n\n), unnested_schedules as (\n select\n split_days.*,\n replace(replace(replace(replace(cast(unnested_schedule::text as TEXT), '{', ''), '}', ''), '\"', ''), ' ', '') as cleaned_unnested_schedule\n from split_days\n cross join lateral jsonb_array_elements(('[' || replace(day_of_week_schedule, ',', '},{') || ']')::jsonb) as unnested_schedule\n\n\n\n-- Each cleaned_unnested_schedule will have the format hh:mm:hh:mm, so we can extract each time part. 
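Each `cleaned_unnested_schedule` value is then split on `:` into four parts and converted to minutes from Sunday 00:00 by the `split_times` and `calculate_start_end_times` CTEs that follow. A worked standalone example, assuming Postgres; the Wednesday 09:00-17:00 values are illustrative:

```sql
-- Minimal Postgres sketch of the minutes-from-Sunday arithmetic.
with unnested_schedules as (
    select
        '09:00:17:00'::text as cleaned_unnested_schedule,
        3 as day_of_week_number  -- wednesday, with sunday = 0
)
select
    cast(split_part(cleaned_unnested_schedule, ':', 1) as integer) * 60
        + cast(split_part(cleaned_unnested_schedule, ':', 2) as integer)
        + 24 * 60 * day_of_week_number as start_time,  -- 9*60 + 3*1440 = 4860
    cast(split_part(cleaned_unnested_schedule, ':', 3) as integer) * 60
        + cast(split_part(cleaned_unnested_schedule, ':', 4) as integer)
        + 24 * 60 * day_of_week_number as end_time     -- 17*60 + 3*1440 = 5340
from unnested_schedules;
```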
\n), split_times as (\n select \n unnested_schedules.*,\n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 1\n )\n\n\n \n\n, ' ') as integer) as start_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 2\n )\n\n\n \n\n, ' ') as integer) as start_time_mm, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 3\n )\n\n\n \n\n, ' ') as integer) as end_time_hh, \n cast(nullif(\n\n \n \n\n split_part(\n cleaned_unnested_schedule,\n ':',\n 4\n )\n\n\n \n\n, ' ') as integer) as end_time_mm\n from unnested_schedules\n\n-- Calculate the start_time and end_time as minutes from Sunday\n), calculate_start_end_times as (\n select\n schedule_id,\n schedule_id_index,\n start_time_hh * 60 + start_time_mm + 24 * 60 * day_of_week_number as start_time,\n end_time_hh * 60 + end_time_mm + 24 * 60 * day_of_week_number as end_time,\n valid_from,\n valid_until,\n day_of_week,\n day_of_week_number\n from split_times\n)\n\nselect * \nfrom calculate_start_end_times", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_holiday.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk.int_zendesk__schedule_holiday", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_holiday"], "alias": "int_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "e4b4f3ee22aa0e8c5c04c65cb78c232aa9d5aa4ab4908a96d0f2d20293b3b52d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1728513415.997117, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. 
End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from {{ var('schedule') }} \n\n), schedule_holiday as (\n select *\n from {{ var('schedule_holiday') }} \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast({{ dbt.date_trunc('day', 'holiday_start_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_from,\n cast({{ dbt.date_trunc('day', 'holiday_end_date_at') }} as {{ dbt.type_timestamp() }}) as holiday_valid_until,\n cast({{ dbt_date.week_start('holiday_start_date_at','UTC') }} as {{ dbt.type_timestamp() }}) as holiday_starting_sunday,\n cast({{ dbt_date.week_start(dbt.dateadd('week', 1, 'holiday_end_date_at'),'UTC') }} as {{ dbt.type_timestamp() }}) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. First step is to find those holidays.\n {{ dbt.datediff('holiday_start_date_at', 'holiday_end_date_at', 'week') }} + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as {{ dbt.type_int() }}) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join ({{ dbt_utils.generate_series(upper_bound=52) }}) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', -1, dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday')) }} as {{ dbt.type_timestamp() }}) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', '(holiday_week_number - 1) * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as 
holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast({{ dbt.dateadd('day', 'holiday_week_number * 7', 'holiday_starting_sunday') }} as {{ dbt.type_timestamp() }})\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each the holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "language": "sql", "refs": [{"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.type_timestamp", "macro.dbt_date.week_start", "macro.dbt.dateadd", "macro.dbt.datediff", "macro.dbt.type_int", "macro.dbt_utils.generate_series"], "nodes": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may \n change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream \n to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).\n*/\n\n\nwith schedule as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\" \n\n), schedule_holiday as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n\n-- Converts holiday_start_date_at and holiday_end_date_at into daily timestamps and finds the week starts/ends using week_start.\n), schedule_holiday_ranges as (\n select\n holiday_name,\n schedule_id,\n cast(date_trunc('day', holiday_start_date_at) as timestamp) as holiday_valid_from,\n cast(date_trunc('day', holiday_end_date_at) as timestamp) as holiday_valid_until,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_starting_sunday,\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n \n\n holiday_end_date_at + ((interval '1 week') * (1))\n\n + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_ending_sunday,\n -- Since the spine is based on weeks, holidays that span multiple weeks need to be broken up in to weeks. 
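The multi-week expansion above crosses each long holiday with a generated number series and then clamps the first and last segments to the holiday's true start and end dates. A minimal standalone sketch, assuming Postgres and using the built-in `generate_series` in place of the `dbt_utils.generate_series` macro; the dates are illustrative (Dec 23 2024 through Jan 3 2025 spans two Sunday-based weeks):

```sql
-- Minimal Postgres sketch: split a two-week holiday into week-bounded segments.
with holiday as (
    select
        timestamp '2024-12-23' as holiday_valid_from,
        timestamp '2025-01-03' as holiday_valid_until,
        timestamp '2024-12-22' as holiday_starting_sunday,  -- sunday of the first week
        2 as holiday_weeks_spanned
)
select
    week_number,
    case when week_number = 1
        then holiday_valid_from  -- first segment keeps the real start
        else holiday_starting_sunday + (week_number - 1) * interval '7 days'
    end as segment_valid_from,
    case when week_number = holiday_weeks_spanned
        then holiday_valid_until  -- last segment keeps the real end
        else holiday_starting_sunday + week_number * interval '7 days' - interval '1 day'  -- saturday
    end as segment_valid_until
from holiday
cross join generate_series(1, 2) as week_number;
-- segment 1: 2024-12-23 -> 2024-12-28, segment 2: 2024-12-29 -> 2025-01-03
```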
First step is to find those holidays.\n \n (\n ((holiday_end_date_at)::date - (holiday_start_date_at)::date)\n / 7 + case\n when date_part('dow', (holiday_start_date_at)::timestamp) <= date_part('dow', (holiday_end_date_at)::timestamp) then\n case when holiday_start_date_at <= holiday_end_date_at then 0 else -1 end\n else\n case when holiday_start_date_at <= holiday_end_date_at then 1 else 0 end\n end)\n + 1 as holiday_weeks_spanned\n from schedule_holiday\n\n-- Creates a record for each week of multi-week holidays. Update valid_from and valid_until in the next cte.\n), expanded_holidays as (\n select\n schedule_holiday_ranges.*,\n cast(week_numbers.generated_number as integer) as holiday_week_number\n from schedule_holiday_ranges\n -- Generate a sequence of numbers from 0 to the max number of weeks spanned, assuming a holiday won't span more than 52 weeks\n cross join (\n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n) as week_numbers\n where schedule_holiday_ranges.holiday_weeks_spanned > 1\n and week_numbers.generated_number <= schedule_holiday_ranges.holiday_weeks_spanned\n\n-- Define start and end times for each segment of a multi-week holiday.\n), split_multiweek_holidays as (\n\n -- Business as usual for holidays that fall within a single week.\n select\n holiday_name,\n schedule_id,\n holiday_valid_from,\n holiday_valid_until,\n holiday_starting_sunday,\n holiday_ending_sunday,\n holiday_weeks_spanned\n from schedule_holiday_ranges\n where holiday_weeks_spanned = 1\n\n union all\n\n -- Split holidays by week that span multiple weeks since the schedule spine is based on weeks.\n select\n holiday_name,\n schedule_id,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_valid_from\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_valid_from,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_valid_until\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n \n\n holiday_starting_sunday + ((interval '1 day') * (holiday_week_number * 7))\n\n + ((interval '1 day') * (-1))\n\n as timestamp) -- saturday\n end as holiday_valid_until,\n case \n when holiday_week_number = 1 -- first week in multiweek holiday\n then holiday_starting_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * ((holiday_week_number - 1) * 7))\n\n as timestamp)\n end as holiday_starting_sunday,\n case \n when holiday_week_number = holiday_weeks_spanned -- last week in multiweek holiday\n then holiday_ending_sunday\n -- We have to use days in case warehouse does not truncate to Sunday.\n else cast(\n\n holiday_starting_sunday + ((interval '1 day') * 
(holiday_week_number * 7))\n\n as timestamp)\n end as holiday_ending_sunday,\n holiday_weeks_spanned\n from expanded_holidays\n where holiday_weeks_spanned > 1\n\n-- Create a record for each the holiday start and holiday end for each week to use downstream.\n), split_holidays as (\n -- Creates a record that will be used for the time before a holiday\n select\n split_multiweek_holidays.*,\n holiday_valid_from as holiday_date,\n '0_gap' as holiday_start_or_end\n from split_multiweek_holidays\n\n union all\n\n -- Creates another record that will be used for the holiday itself\n select\n split_multiweek_holidays.*,\n holiday_valid_until as holiday_date,\n '1_holiday' as holiday_start_or_end\n from split_multiweek_holidays\n)\n\nselect *\nfrom split_holidays", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.006282, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__requester_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.007671, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows 
unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n 
where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1728513416.008972, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__latest_ticket_form\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], 
"meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.012309, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__ticket_aggregates\"", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", 
"original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1728513416.01579, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"int_zendesk__organization_aggregates\"", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": 
["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", "model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": "hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "1a552926900a1bf3cf29e89f5cb389233833bc1aec2eeba355c4843484e5bc30"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 
1728513416.082197, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", \"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.5137842, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add 
more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.517884, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.518861, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history\"", "raw_code": "with base as (\n\n 
select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "b8f96c4a1206c9c9a491dd39c7a186d2d80346600d1f828b535a4185ca3f02e8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 
1728513416.521578, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"", "raw_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": "8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": {"enabled": 
true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.519936, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], 
"quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.512109, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.520559, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group": {"database": 
"postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.511212, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "ffc2c4310aafe6b90a26e22cdab400e6d4c750faab7ea4d7519b2cf9105d3f16"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, 
"access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.51351, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n _fivetran_deleted,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, 
"access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.516932, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.516636, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", 
"checksum": {"name": "sha256", "checksum": "7227f84c3600cc310217efae6695bc0f6aea11b2392f5709a54d444a772a9d2c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. 
The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.515702, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n 
select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n _fivetran_deleted,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__brand.sql", "original_file_path": "models/stg_zendesk__brand.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand", "fqn": ["zendesk_source", "stg_zendesk__brand"], "alias": "stg_zendesk__brand", "checksum": {"name": "sha256", "checksum": "106699200d371f2fac9fe94ce084a357331b215d4130195e1e94d2d07c6d169c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, 
"tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Brands are your customer-facing identities. They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"brand_id": {"name": "brand_id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.510216, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__brand_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),\n staging_columns=get_brand_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__brand_tmp", "package": null, "version": null}, {"name": "stg_zendesk__brand_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_brand_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__brand_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__brand.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_form_history.sql", "original_file_path": "models/stg_zendesk__ticket_form_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_form_history"], "alias": "stg_zendesk__ticket_form_history", "checksum": {"name": "sha256", "checksum": "1e70e9a0b2dfce82e649a8a0507d59d6f3f2832429191ea67988ba0dfd1017cf"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"ticket_form_id": {"name": "ticket_form_id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, 
"tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.517465, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_form_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_form_history_tmp')),\n staging_columns=get_ticket_form_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_form_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_form_history.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__audit_log.sql", "original_file_path": "models/stg_zendesk__audit_log.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log", "fqn": ["zendesk_source", "stg_zendesk__audit_log"], "alias": "stg_zendesk__audit_log", "checksum": {"name": "sha256", "checksum": "1ac0a9848b3c25cfa7f2e52ca7a4a81d1bbfb5247f27c952197420663df36b2b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"audit_log_id": {"name": "audit_log_id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.508357, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log\"", "raw_code": "{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', False)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__audit_log_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__audit_log_tmp')),\n staging_columns=get_audit_log_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n select \n cast(id as {{ dbt.type_string() }}) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}, {"name": "stg_zendesk__audit_log_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_audit_log_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__audit_log_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__audit_log.sql", "compiled": true, "compiled_code": "\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n action\n \n as \n \n action\n \n, \n \n \n actor_id\n \n as \n \n actor_id\n \n, \n \n \n change_description\n \n as \n \n change_description\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n source_id\n \n as \n \n source_id\n \n, \n \n \n source_label\n \n as \n \n source_label\n \n, \n \n \n source_type\n \n as \n \n source_type\n \n\n\n\n \n from base\n),\n\nfinal as (\n select \n cast(id as TEXT) as audit_log_id,\n action,\n actor_id,\n change_description,\n cast(created_at as timestamp) as created_at,\n source_id,\n source_label,\n source_type,\n _fivetran_synced\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__domain_name.sql", "original_file_path": "models/stg_zendesk__domain_name.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "fqn": ["zendesk_source", "stg_zendesk__domain_name"], "alias": "stg_zendesk__domain_name", "checksum": {"name": "sha256", "checksum": "8c3a4735e0cdea5a463eefc3c6820d15d622857af45dab942410dc64a0ac4bda"}, "config": {"enabled": true, "alias": null, "schema": 
"zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.51089, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__domain_name_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__domain_name_tmp')),\n staging_columns=get_domain_name_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}, {"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_domain_name_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__domain_name.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization_tag.sql", "original_file_path": "models/stg_zendesk__organization_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "fqn": ["zendesk_source", "stg_zendesk__organization_tag"], "alias": "stg_zendesk__organization_tag", "checksum": {"name": "sha256", "checksum": "15f1f4014e4ba78ae7992f28c61e3926b7cd12c6bb32efc7b516db93c1e20d82"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.5116339, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tag_tmp')),\n staging_columns=get_organization_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket.sql", "original_file_path": "models/stg_zendesk__ticket.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket", "fqn": ["zendesk_source", "stg_zendesk__ticket"], "alias": "stg_zendesk__ticket", "checksum": {"name": "sha256", "checksum": "8a1201482d9f933a720698fa97c33d1499d5aeeaecd3706d97b3864b54eea531"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], 
"description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The id of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The title of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.509226, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tmp')),\n staging_columns=get_ticket_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n cast(null as boolean) as \n \n _fivetran_deleted\n \n , \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n _fivetran_deleted,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, 
"extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.27414, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zz_zendesk\".\"daylight_time_data\" as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": "606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.285779, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zz_zendesk\".\"user_data\" as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ 
var('directed_schema','dev') }}"}, "created_at": 1728513416.289189, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"group_data\" as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.292505, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n 
\"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_data\" as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.296489, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zz_zendesk\".\"brand_data\" as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", 
"stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.299778, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_tag_data\" as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "b8c30a0287707a207a6a36bf2c2ee3efe094368261417f5dbb5c5c5f4f858b9c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.30307, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"", "raw_code": "--To 
disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules or using_holidays variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_holiday_data\" as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.306769, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect 
\"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"user_tag_data\" as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.311093, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_field_history_data\" as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", "checksum": {"name": "sha256", "checksum": 
"0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.314401, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_form_history_data\" as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": "stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": "756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, 
"meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.317961, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_comment_data\" as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.321266, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, "compiled_path": 
"target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zz_zendesk\".\"organization_tag_data\" as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__audit_log_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__audit_log_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__audit_log_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__audit_log_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__audit_log_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__audit_log_tmp"], "alias": "stg_zendesk__audit_log_tmp", "checksum": {"name": "sha256", "checksum": "099cfd6411ac2c206f87b00391b5b12877367364bb450bafa449e4e1098acaf0"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.325442, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__audit_log_tmp\"", "raw_code": "{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', False)) }}\n\nselect {{ dbt_utils.star(source('zendesk','audit_log')) }} \nfrom {{ source('zendesk','audit_log') }} as audit_log_table", "language": "sql", "refs": [], "sources": [["zendesk", "audit_log"], ["zendesk", "audit_log"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.audit_log"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__audit_log_tmp.sql", "compiled": true, "compiled_code": "\n\nselect \"id\",\n \"_fivetran_synced\",\n \"action\",\n \"actor_id\",\n \"change_description\",\n \"created_at\",\n \"source_id\",\n \"source_label\",\n \"source_type\" \nfrom \"postgres\".\"zz_zendesk\".\"audit_log_data\" as audit_log_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": 
"model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.329109, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zz_zendesk\".\"schedule_data\" as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": 
"protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1728513416.332664, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zz_zendesk\".\"organization_data\" as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.335979, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ 
source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zz_zendesk\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__domain_name_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.3417192, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names 
variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zz_zendesk\".\"domain_name_data\" as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "postgres", "schema": "zz_zendesk_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1728513416.3452811, "relation_name": "\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zz_zendesk\".\"time_zone_data\" as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", 
"unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.492933, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.4939709, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": "unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.4948611, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies", "test_metadata": {"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", 
"store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.495908, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.4967349, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics", "test_metadata": {"name": 
"not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": "unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.522091, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.522947, 
"relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.52376, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": 
"not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": "not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.524949, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.525763, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": 
"target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.526567, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": {"name": "none", 
"checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.527373, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.5281768, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by organization_id\nhaving count(*) 
> 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.5290308, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": 
"dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.52983, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.5306282, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom 
\"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.531424, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": 
null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.532278, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.533076, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": "models.stg_zendesk__ticket_form_history", 
"attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1728513416.533886, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time", "test_metadata": {"name": "unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": 
"test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": "unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.5455968, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.5464199, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": 
"target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.547318, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": 
"test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", "not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1728513416.54814, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zz_zendesk_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}}}, "sources": {"source.zendesk_source.zendesk.audit_log": {"database": "postgres", "schema": "zz_zendesk", "name": "audit_log", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.audit_log", "fqn": ["zendesk_source", "zendesk", "audit_log"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "audit_log_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The `audit_log` table captures historical changes and actions within Zendesk. It provides a record of modifications made to tickets, schedules, and other objects, allowing for a detailed audit trail. Each row represents an action performed by an actor, including the time of the action, the affected entity, and a description of the changes. 
This table is especially useful for tracking schedule modifications and maintaining a history of schedule changes.\n", "columns": {"id": {"name": "id", "description": "The unique identifier for each audit log entry, representing a distinct action or change.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "action": {"name": "action", "description": "Describes the specific action performed within Zendesk, such as ticket updates or schedule modifications.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "actor_id": {"name": "actor_id", "description": "The unique identifier of the user or system responsible for performing the action.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "change_description": {"name": "change_description", "description": "A detailed description of the changes made during the action, capturing what was altered.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The timestamp indicating when the action was performed and recorded in the audit log.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_id": {"name": "source_id", "description": "The unique identifier of the entity affected by the action, such as a ticket or schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_label": {"name": "source_label", "description": "A label that provides additional context about the affected entity, typically related to its type or name.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_type": {"name": "source_type", "description": "Specifies the type of entity impacted by the action, such as a ticket, schedule, or user.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_synced": {"name": "_fivetran_synced", "description": "The timestamp when the record was last synchronized by Fivetran, used to track data freshness.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"audit_log_data\"", "created_at": 1728513416.609035}, "source.zendesk_source.zendesk.ticket": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. 
Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_data\"", "created_at": 1728513416.6098208}, "source.zendesk_source.zendesk.brand": {"database": "postgres", "schema": "zz_zendesk", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "created_at": 1728513416.609927}, "source.zendesk_source.zendesk.domain_name": {"database": "postgres", "schema": "zz_zendesk", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"domain_name_data\"", "created_at": 1728513416.610034}, "source.zendesk_source.zendesk.group": {"database": "postgres", "schema": "zz_zendesk", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. 
Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"group_data\"", "created_at": 1728513416.610124}, "source.zendesk_source.zendesk.organization_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "created_at": 1728513416.610223}, "source.zendesk_source.zendesk.organization": {"database": "postgres", "schema": "zz_zendesk", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. 
Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name": "shared_tickets", "description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_data\"", "created_at": 1728513416.610328}, "source.zendesk_source.zendesk.ticket_comment": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": 
{"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_comment_data\"", "created_at": 1728513416.6104288}, "source.zendesk_source.zendesk.user_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "created_at": 1728513416.6105332}, "source.zendesk_source.zendesk.user": {"database": "postgres", "schema": "zz_zendesk", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users: end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization membership, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. 
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "_fivetran_deleted": {"name": "_fivetran_deleted", "description": "Boolean created by Fivetran to indicate whether the record has been deleted.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "created_at": 1728513416.610699}, "source.zendesk_source.zendesk.schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, 
"data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_data\"", "created_at": 1728513416.6108}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_schedule_data\"", "created_at": 1728513416.6108859}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": 
[], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_form_history_data\"", "created_at": 1728513416.6109881}, "source.zendesk_source.zendesk.ticket_tag": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"ticket_tag_data\"", "created_at": 1728513416.611073}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "postgres", "schema": "zz_zendesk", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": 
"\"postgres\".\"zz_zendesk\".\"ticket_field_history_data\"", "created_at": 1728513416.61116}, "source.zendesk_source.zendesk.daylight_time": {"database": "postgres", "schema": "zz_zendesk", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"daylight_time_data\"", "created_at": 1728513416.611247}, "source.zendesk_source.zendesk.time_zone": {"database": "postgres", "schema": "zz_zendesk", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zz_zendesk\".\"time_zone_data\"", "created_at": 1728513416.611328}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "postgres", "schema": "zz_zendesk", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zz_zendesk\".\"schedule_holiday_data\"", "created_at": 1728513416.611423}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.525457, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.525769, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": 
"postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5259688, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5260851, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5262048, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.526321, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as 
table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.527894, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.528252, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n 
pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.528925, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5290508, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n 
{{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.538273, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.538837, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.53914, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.539436, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.539878, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.54029, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.540458, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, 
auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.54078, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.541149, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.541932, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.542121, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.542424, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5426931, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ 
magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5430982, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.543309, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.543874, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.544069, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1728513414.544181, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.54439, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5445702, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.544986, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and 
[dbt manifest artifact, continued — serialized dbt-postgres macros: the remainder of postgres__snapshot_merge_sql (invalidating updated/deleted rows, then inserting new ones), the materialized-view relation macros (drop, describe, refresh, rename, alter with index create/drop, create-with-indexes), table and view drop/replace/rename, and the first cross-database utils (postgres__dateadd, postgres__listagg). Machine-generated JSON omitted.]
[manifest, continued — the remaining dbt-postgres utils (postgres__datediff with per-datepart arithmetic, any_value via min, last_day, split_part) and dbt-core helpers: run_hooks/make_hook_config for pre- and post-hooks run inside or outside the transaction, set_sql_header, should_full_refresh, should_store_failures, the snapshot_merge_sql dispatch with its default merge statement, and the opening of strategy_dispatch. Machine-generated JSON omitted.]
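A hedged sketch of the hook helpers in use: make_hook_config serializes each hook as {"sql": ..., "transaction": ...}, and run_hooks commits first when a hook is flagged as outside the transaction. Schema and table names here are illustrative:

```sql
{{ config(
    materialized = 'table',
    -- before_begin() tags the hook with transaction = false, so run_hooks
    -- issues a commit and runs it outside the model's transaction
    -- (useful for statements like VACUUM that cannot run in a transaction)
    pre_hook = before_begin("vacuum analyze my_schema.my_table"),
    -- in_transaction() is the default wrapping for plain string hooks
    post_hook = in_transaction("grant select on " ~ this ~ " to role_reporting")
) }}

select 1 as id
```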
[manifest, continued — the snapshot strategy and helper macros: strategy_dispatch resolution, snapshot_hash_arguments (md5 over coalesced, pipe-joined arguments), the timestamp strategy (a row counts as changed when the snapshotted dbt_valid_from predates the incoming updated_at), the check strategy with column-by-column comparison and schema-change detection, create_columns/post_snapshot/get_true_sql, the staging-table query (insertions, updates, and optional hard-delete invalidation), build_snapshot_table, build_snapshot_staging_table, and the opening of the default snapshot materialization. Machine-generated JSON omitted.]
[manifest, continued — the body of the default snapshot materialization (create the table on first run; otherwise build a staging relation, expand column types, add missing columns, and merge), the default test materialization (optionally persisting failing rows via store_failures_as as a table or view), and get_test_sql/get_unit_test_sql, which compute failures/should_warn/should_error and union actual vs. expected rows. Machine-generated JSON omitted.]
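A hedged example of the store-failures path in that test materialization; the test name, model, and column are illustrative. Per the macro above, when `--store-failures` is set and store_failures_as is unset, it defaults to 'table':

```sql
-- tests/assert_no_negative_reply_times.sql (hypothetical singular test)
{{ config(
    store_failures = true,
    -- persist failing rows as a view instead of the default table
    store_failures_as = 'view'
) }}

select ticket_id
from {{ ref('zendesk__ticket_metrics') }}
where first_reply_time_calendar_minutes < 0
```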
"macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5851579, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set expected_sql = config.get('expected_sql') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n {%- endfor -%}\n\n {% if not expected_sql %}\n {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}\n {% endif %}\n {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_expected_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.586889, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = 
materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.591865, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.592252, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5924978, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": 
"macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.593855, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.594084, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, 
existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.594707, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequently drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.5974689, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.600373, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.60198, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.602499, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.603122, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.603348, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if 
merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.604037, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.610763, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n 
{% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6126819, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.61295, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6139011, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 
'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.614161, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.614775, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6153831, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.616244, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6164732, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.616651, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6169658, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.617193, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6175208, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6177049, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.617962, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6181362, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.618287, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.618549, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.623457, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.629121, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6303601, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6314921, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.632299, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.632548, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.632663, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6329432, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.633075, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.636802, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.639898, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.645042, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.645895, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.646122, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.646572, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6467588, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- 
dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6468868, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6470249, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.647168, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.647352, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.647485, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1728513414.647987, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6481678, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6493719, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6498072, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ 
return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.650166, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.650669, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.650919, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6511931, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.651571, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.65181, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.652499, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6528482, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.653023, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728513414.653242, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.653449, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6542542, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and 
create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6554759, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6558468, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.65609, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.656348, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6565492, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.656854, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6570518, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.657738, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.65815, "supported_languages": null}, "macro.dbt.rename_relation": {"name": 
"rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6583512, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6586199, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.658953, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.659211, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": 
["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.659751, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6601999, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.660522, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.660723, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.660963, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.661066, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.661331, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.661543, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.661838, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ 
exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6619678, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6622279, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6623669, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.662954, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been 
implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.66313, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.663397, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.663535, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.663795, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": 
"macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.663929, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.664905, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.665021, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.665635, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) 
-%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.665816, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.665948, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.66723, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.667592, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.667914, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1728513414.668161, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.668262, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.668521, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.668658, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.668916, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6690528, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.669878, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.670057, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.670465, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6712031, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": 
"default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.67167, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6718478, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6720152, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.672251, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.672352, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6731892, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.673333, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6744618, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.674651, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.674861, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.67512, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.675256, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.675647, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.675801, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6759732, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.676374, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.676715, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.677002, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.677237, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.677855, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for 
node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6793, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6798599, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.680137, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.681961, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, 
in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6831381, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.683917, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.684153, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.684372, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.684448, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.685187, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6857479, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.685966, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6863139, "supported_languages": null}, "macro.dbt.date": {"name": "date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(adapter.dispatch('date', 'dbt') (year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.686634, "supported_languages": null}, "macro.dbt.default__date": {"name": "default__date", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date.sql", "original_file_path": "macros/utils/date.sql", "unique_id": "macro.dbt.default__date", "macro_sql": "{% macro default__date(year, month, day) -%}\n {%- set dt = modules.datetime.date(year, month, day) -%}\n {%- set iso_8601_formatted_date = dt.strftime('%Y-%m-%d') -%}\n to_date('{{ iso_8601_formatted_date }}', 'YYYY-MM-DD')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.686912, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", 
"macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6872241, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.687378, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.687616, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6877341, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6885371, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.688926, "supported_languages": 
null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.689109, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.689623, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.689915, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.690051, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.690389, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": 
"default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.69062, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6908329, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.690906, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6911528, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.691287, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1728513414.6915672, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.691701, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6923368, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.692723, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6930418, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.693197, "supported_languages": null}, 
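The `right`, `listagg`, and `datediff` wrappers above all follow the same dispatch pattern: the `dbt.*` entry point resolves to an adapter override (here the Postgres implementations) or falls back to the `default__` body. A hedged usage sketch against a hypothetical `tickets` relation:

```sql
{# Minimal sketch; `tickets` and its columns are assumptions, not package models. #}
select
    ticket_id,
    {{ dbt.datediff("created_at", "solved_at", "hour") }} as hours_to_solve,
    {{ dbt.right("external_ref", 4) }} as ref_suffix
from tickets
```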
"macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6934662, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.693599, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6938481, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6939971, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.694228, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1728513414.694378, "supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.694648, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6947799, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.695012, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.695174, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6954439, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6955771, "supported_languages": null}, "macro.dbt.string_literal": 
{"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.69582, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.695949, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.696906, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.69706, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.697216, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.697362, "supported_languages": null}, "macro.dbt.type_float": {"name": 
"type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.697567, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.697727, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.697898, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.698076, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6982331, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.698376, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.698528, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6986701, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.698826, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.698962, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.699232, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.699441, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.6996732, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.699771, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.700093, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.700341, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.700484, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.70099, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7011478, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.701362, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.701625, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7017899, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.702212, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": 
"macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7024589, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.70273, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.702865, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7032251, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.703406, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.7035599, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7037382, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.704206, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.704577, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.704716, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.704816, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", 
"unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.704968, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7050428, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.705197, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7053518, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.706164, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": 
"macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7062972, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7064462, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7068212, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.706998, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.707127, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) 
-%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.707276, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.707395, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7094882, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.709652, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7098649, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = 
base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.71015, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7104619, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.710766, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.710943, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.711096, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": 
"macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.711333, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7118552, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.712072, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.712209, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.712611, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.712991, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7132711, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.713494, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7152839, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.715401, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.715561, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.715673, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.716001, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.716178, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7162812, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.716494, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.716749, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.716964, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.717141, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.717356, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", 
"unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7179892, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7181718, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7184021, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.718616, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.719687, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.720293, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.720491, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7206268, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.7212842, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7214458, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.721638, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.721795, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7220511, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.722572, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.725408, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7256498, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.725847, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.726138, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.726345, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7265081, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.726686, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.726924, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7271261, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) 
-%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.727411, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7275882, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.727744, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.727901, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7280462, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.728246, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.728409, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7306812, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7308328, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.731188, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% 
macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.731393, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.731586, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.731759, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.732942, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", 
"path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.733282, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7334652, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.733792, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.734011, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.73457, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.734865, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.735599, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = 
adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this ~ \" because the relation doesn't exist\") }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.738782, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.739388, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n 
\"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for \" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.740455, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.742094, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.742246, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7430131, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7433999, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.74394, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND 
----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.744438, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.744544, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.745088, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.745325, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.745619, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1728513414.745901, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.746241, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7466938, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7471611, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.747773, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7480772, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.748379, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.749453, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.750566, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7514722, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. 
full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.752488, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.75315, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7534752, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", 
"unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.754178, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7549582, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.755388, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as 
filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.75582, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.756514, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.75698, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.757513, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.757879, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.75854, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' ,') 
+ ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n {% if group_by_columns|length() == 0 %}\n where {{ column_name }} is not null\n limit 1\n {% endif %}\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.75941, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7600179, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.760611, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": 
"macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.761157, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.761534, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.761921, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.762283, 
"supported_languages": null}, "macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.763021, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as {{ dbt.type_numeric() }}) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.763835, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7647302, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro default__test_sequential_values(model, 
column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n{% set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7655869, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns, exclude_columns, precision)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.767491, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n\n{%- if compare_columns and exclude_columns -%}\n {{ exceptions.raise_compiler_error(\"Both a compare and an ignore list were provided to the `equality` macro. Only one is allowed\") }}\n{%- endif -%}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{# Ensure there are no extra columns in the compare_model vs model #}\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- do dbt_utils._is_ephemeral(compare_model, 'test_equality') -%}\n\n {%- set model_columns = adapter.get_columns_in_relation(model) -%}\n {%- set compare_model_columns = adapter.get_columns_in_relation(compare_model) -%}\n\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- set include_model_columns = [] %}\n {%- for column in model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n {%- for column in compare_model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_model_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns_set = set(include_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(include_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- else -%}\n {%- set compare_columns_set = set(model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(compare_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- endif -%}\n\n {% if compare_columns_set != compare_model_columns_set %}\n {{ exceptions.raise_compiler_error(compare_model ~\" has less columns than \" ~ model ~ \", please ensure they have the same columns or use the `compare_columns` or `exclude_columns` arguments to subset them.\") }}\n {% endif %}\n\n\n{% endif %}\n\n{%- if not precision -%}\n {%- if not compare_columns -%}\n {# \n You cannot get the columns in an ephemeral model (due to not existing in the information schema),\n so if the user does not provide an explicit list of columns we must error in the case it is ephemeral\n #}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model)-%}\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- for column in compare_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns = include_columns | map(attribute='quoted') %}\n {%- else -%} {# Compare columns provided #}\n {%- set compare_columns = compare_columns | map(attribute='quoted') %}\n {%- endif -%}\n {%- endif -%}\n\n {% set compare_cols_csv = compare_columns | join(', ') %}\n\n{% else %} {# Precision required #}\n {#-\n If rounding is required, we need to get the types, so it cannot be ephemeral even if they provide column names\n -#}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set columns = adapter.get_columns_in_relation(model) -%}\n\n {% set columns_list = [] %}\n {%- for col in columns -%}\n {%- if (\n (col.name|lower in compare_columns|map('lower') or not compare_columns) and\n (col.name|lower not in exclude_columns|map('lower') or not exclude_columns)\n ) -%}\n {# 
Databricks double type is not picked up by any number type checks in dbt #}\n {%- if col.is_float() or col.is_numeric() or col.data_type == 'double' -%}\n {# Cast is required due to postgres not having round for a double precision number #}\n {%- do columns_list.append('round(cast(' ~ col.quoted ~ ' as ' ~ dbt.type_numeric() ~ '),' ~ precision ~ ') as ' ~ col.quoted) -%}\n {%- else -%} {# Non-numeric type #}\n {%- do columns_list.append(col.quoted) -%}\n {%- endif -%}\n {% endif %}\n {%- endfor -%}\n\n {% set compare_cols_csv = columns_list | join(', ') %}\n\n{% endif %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_numeric", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.771323, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7718441, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.772135, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", 
"macro_sql": "{% test mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.775768, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last 
record.\n coalesce(\n upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.777196, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.777475, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.777635, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.778049, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7783182, "supported_languages": null}, 
"macro.dbt_utils.default__pretty_time": {"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.778502, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.778742, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7789068, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{% if not string %}\n{{ return('') }}\n{% endif %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.779586, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro 
}}` macro cannot be used with ephemeral models, as it relies on the information schema.\n\n`{{ model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7803981, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.781149, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.781714, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.781937, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n 
)}}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.782285, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7826529, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.783247, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7835488, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.783871, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7845361, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7853382, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.786155, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7865758, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7867908, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if 
not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7873018, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.787968, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7887738, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.789161, "supported_languages": null}, 
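Of the SQL helpers in this stretch of the manifest, `date_spine` documents its own call pattern in an inline comment. As a minimal sketch, assuming a hypothetical model such as `models/utils/all_days.sql`, it can be materialized like so (the date bounds are illustrative):

```sql
-- Hypothetical dbt model: emits one row per day between the two bounds.
-- Bounds are SQL expressions passed as strings, per the macro's own embedded example.
{{ dbt_utils.date_spine(
    datepart="day",
    start_date="cast('2020-01-01' as date)",
    end_date="cast(current_date as date)"
) }}
```

Under the hood this composes `get_intervals_between` (to count the periods), `generate_series` (to fan out that many rows via powers-of-two cross joins), and `dbt.dateadd` (to offset each row from `start_date`), exactly as the `depends_on` entries above record.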
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.78943, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.790669, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.792056, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name, quote_identifiers)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7936692, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value', quote_identifiers=False) -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n {%- set current_col_name = adapter.quote(col.column) if quote_identifiers else col.column -%}\n select\n {%- for exclude_col in exclude %}\n {{ adapter.quote(exclude_col) if quote_identifiers else exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ adapter.quote(field_name) if quote_identifiers else field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(current_col_name) }}\n {% else %}\n {{ current_col_name }}\n {% endif %}\n as {{ cast_to }}) as {{ 
adapter.quote(value_name) if quote_identifiers else value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7954218, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.7957041, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.795834, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.799138, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8023858, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
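The `default__union_relations` entry above builds a column superset across the input relations, null-filling and casting any column a given relation lacks, and tags each row with `_dbt_source_relation`. A minimal usage sketch (model and relation names here are hypothetical, not from this package):

```sql
-- models/unioned_orders.sql (hypothetical): stack two staging tables that
-- share most of their columns. union_relations null-fills any column that
-- is missing on one side and records each row's origin relation.
{{ dbt_utils.union_relations(
    relations=[ref('stg_orders_us'), ref('stg_orders_eu')],
    exclude=['_fivetran_synced']
) }}
```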
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.802688, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.802922, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.postgres__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.803794, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.803998, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }} as tt\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8042161, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.804435, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8046138, "supported_languages": null}, "macro.dbt_utils.databricks__deduplicate": {"name": "databricks__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.databricks__deduplicate", "macro_sql": "\n{%- macro databricks__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.80478, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.80495, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, 
*varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.805324, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by \\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.80555, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.805912, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.806423, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.806748, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.80706, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.810345, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
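As the `default__surrogate_key` entry above spells out, `surrogate_key` is hard-deprecated in favor of `generate_surrogate_key`, and `default__safe_add` warns when passed legacy varargs instead of a single list. A sketch of the current list-style call (model and column names are hypothetical):

```sql
-- Hypothetical order-total model: safe_add coalesces each null component
-- to 0 before summing, so one missing fee cannot null the whole total.
select
    order_id,
    {{ dbt_utils.safe_add(['subtotal', 'tax', 'shipping']) }} as order_total
from {{ ref('stg_orders') }}
```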
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.810685, "supported_languages": null}, "macro.dbt_utils.redshift__get_tables_by_pattern_sql": {"name": "redshift__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.redshift__get_tables_by_pattern_sql", "macro_sql": "{% macro redshift__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% set sql %}\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from \"{{ database }}\".\"information_schema\".\"tables\"\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n union all\n select distinct\n schemaname as {{ adapter.quote('table_schema') }},\n tablename as {{ adapter.quote('table_name') }},\n 'external' as {{ adapter.quote('table_type') }}\n from svv_external_tables\n where redshift_database_name = '{{ database }}'\n and schemaname ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n {% endset %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.811636, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.812316, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro 
_bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.812793, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8139222, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": "default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. 
Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8153548, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.816393, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8172941, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.817774, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {{ return('') }}\n {% endif %}\n\n {%- set include_cols = [] %}\n {%- set cols = adapter.get_columns_in_relation(from) -%}\n {%- set except = except | map(\"lower\") | list %}\n {%- for col in cols -%}\n {%- if col.column|lower not in except -%}\n {% do include_cols.append(col.column) %}\n {%- endif %}\n {%- endfor %}\n\n {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.818465, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.819229, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.81967, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.819849, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n {{ return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.820225, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.820792, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8212328, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": "\n\n{%- macro 
default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8218298, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.postgres__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.822357, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.822496, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8226292, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n case 
table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.822763, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.823328, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. 
Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.824117, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.825212, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8254871, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.826043, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.826796, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8320699, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in 
get_tables(table_regex_pattern) %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.832721, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.833547, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.834228, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for table 
in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.83492, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8359392, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.836128, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.836239, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
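The `spark_utils` maintenance macros above (`spark_optimize_delta_tables`, `spark_vacuum_delta_tables`, `spark_analyze_tables`) each iterate over the tables matched by `table_regex_pattern` and log per-table timings. They are operational macros rather than models; one hedged way to bundle them is a wrapper macro invoked via `dbt run-operation` (the wrapper name and schema pattern are hypothetical):

```sql
-- macros/nightly_maintenance.sql (hypothetical): Delta upkeep for every
-- table whose database.table name starts with "analytics.".
{% macro nightly_maintenance() %}
    {% do spark_utils.spark_optimize_delta_tables('analytics\..*') %}
    {% do spark_utils.spark_vacuum_delta_tables('analytics\..*') %}
    {% do spark_utils.spark_analyze_tables('analytics\..*') %}
{% endmacro %}
```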
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.839016, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}})))\n else ceil(months_between(date({{second_date}}), date({{first_date}})))\n end\n \n -- did we cross a month boundary?\n + case\n when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n else 0 end\n \n {%- elif datepart == 'quarter' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n end\n \n -- did we cross a quarter boundary?\n + case\n when {{first_date}} < {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then 1\n when {{first_date}} > {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then -1\n else 0 end\n\n {%- elif datepart == 'year' -%}\n \n year({{second_date}}) - year({{first_date}})\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set divisor -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n case when {{first_date}} < {{second_date}}\n then ceil((\n {# make sure the timestamps are real, otherwise raise an error asap #}\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n else floor((\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n end\n \n {% if datepart == 'millisecond' %}\n + cast(date_format({{second_date}}, 'SSS') as int)\n - cast(date_format({{first_date}}, 'SSS') as int)\n {% endif %}\n \n {% if datepart == 'microsecond' %} \n {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n -- Spark doesn't really support microseconds, so this is a massive hack!\n -- It will only work if the timestamp-string is of the format\n -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int) \n {% endif %}\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8466408, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.846827, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8469079, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.847552, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.849108, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8494332, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.849695, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.849953, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.850261, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8504488, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.850642, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.860064, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.862253, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n 
{{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\n\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8644822, "supported_languages": null}, "macro.dbt_date.get_base_dates": {"name": "get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.get_base_dates", "macro_sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_base_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8661911, "supported_languages": null}, "macro.dbt_date.default__get_base_dates": {"name": "default__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.default__get_base_dates", "macro_sql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.866901, "supported_languages": null}, "macro.dbt_date.bigquery__get_base_dates": {"name": "bigquery__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": 
"macro.dbt_date.bigquery__get_base_dates", "macro_sql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as datetime )\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as datetime )\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.867546, "supported_languages": null}, "macro.dbt_date.trino__get_base_dates": {"name": "trino__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.trino__get_base_dates", "macro_sql": "{% macro trino__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.now()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.now", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.868253, "supported_languages": null}, "macro.dbt_date.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.868952, "supported_languages": null}, "macro.dbt_date.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, 
end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.869515, "supported_languages": null}, "macro.dbt_date.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8697338, "supported_languages": null}, "macro.dbt_date.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{\n dbt_date.generate_series(\n dbt_date.get_intervals_between(start_date, end_date, datepart)\n )\n }}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"(row_number() over (order by 1) - 1)\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.generate_series", "macro.dbt_date.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.87008, "supported_languages": null}, "macro.dbt_date.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.870896, "supported_languages": null}, "macro.dbt_date.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": 
"macro.dbt_date.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.871302, "supported_languages": null}, "macro.dbt_date.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.871511, "supported_languages": null}, "macro.dbt_date.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8720498, "supported_languages": null}, "macro.dbt_date.date": {"name": "date", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(modules.datetime.date(year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.872381, "supported_languages": null}, "macro.dbt_date.datetime": {"name": "datetime", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.datetime", "macro_sql": "{% macro datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tz=None) %}\n {% set tz = tz if tz else var(\"dbt_date:time_zone\") %}\n {{ return(\n modules.datetime.datetime(\n year=year, month=month, day=day, hour=hour,\n minute=minute, second=second, microsecond=microsecond,\n tzinfo=modules.pytz.timezone(tz)\n )\n ) 
}}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.87291, "supported_languages": null}, "macro.dbt_date.get_fiscal_year_dates": {"name": "get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.get_fiscal_year_dates", "macro_sql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_fiscal_year_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.874751, "supported_languages": null}, "macro.dbt_date.default__get_fiscal_year_dates": {"name": "default__get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.default__get_fiscal_year_dates", "macro_sql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\n-- this gets all the dates within a fiscal year\n-- determined by the given year-end-month\n-- ending on the saturday closest to that month's end date\nwith fsc_date_dimension as (\n select * from {{ dates }}\n),\nyear_month_end as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.month_end_date\n from\n fsc_date_dimension d\n where\n d.month_of_year = {{ year_end_month }}\n group by 1,2\n\n),\nweeks as (\n\n select\n d.year_number,\n d.month_of_year,\n d.date_day as week_start_date,\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\n from\n fsc_date_dimension d\n where\n d.day_of_week = {{ week_start_day }}\n\n),\n-- get all the weeks that start in the month the year ends\nyear_week_ends as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.week_end_date\n from\n weeks d\n where\n d.month_of_year = {{ year_end_month }}\n group by\n 1,2\n\n),\n-- then calculate which Saturday is closest to month end\nweeks_at_month_end as (\n\n select\n d.fiscal_year_number,\n d.week_end_date,\n m.month_end_date,\n rank() over\n (partition by d.fiscal_year_number\n order by\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\n\n ) as closest_to_month_end\n from\n year_week_ends d\n join\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\n),\nfiscal_year_range as (\n\n select\n w.fiscal_year_number,\n cast(\n {{ dbt.dateadd('day', 1,\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\n as date) as fiscal_year_start_date,\n w.week_end_date as fiscal_year_end_date\n from\n weeks_at_month_end w\n where\n w.closest_to_month_end = 1\n\n),\nfiscal_year_dates as (\n\n select\n d.date_day,\n m.fiscal_year_number,\n m.fiscal_year_start_date,\n m.fiscal_year_end_date,\n w.week_start_date,\n w.week_end_date,\n -- we reset the weeks of the year starting with the merch year start date\n dense_rank()\n over(\n partition by m.fiscal_year_number\n order by w.week_start_date\n ) as fiscal_week_of_year\n from\n fsc_date_dimension d\n join\n fiscal_year_range m on d.date_day 
between m.fiscal_year_start_date and m.fiscal_year_end_date\n join\n weeks w on d.date_day between w.week_start_date and w.week_end_date\n\n)\nselect * from fiscal_year_dates order by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.875333, "supported_languages": null}, "macro.dbt_date.get_fiscal_periods": {"name": "get_fiscal_periods", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_periods.sql", "original_file_path": "macros/fiscal_date/get_fiscal_periods.sql", "unique_id": "macro.dbt_date.get_fiscal_periods", "macro_sql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\n{#\nThis macro requires you to pass in a ref to a date dimension, created via\ndbt_date.get_date_dimension()s\n#}\nwith fscl_year_dates_for_periods as (\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\n),\nfscl_year_w13 as (\n\n select\n f.*,\n -- We count the weeks in a 13 week period\n -- and separate the 4-5-4 week sequences\n mod(cast(\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\n ), 13) as w13_number,\n -- Chop weeks into 13 week merch quarters\n cast(\n least(\n floor((f.fiscal_week_of_year-1)/13.0)\n , 3)\n as {{ dbt.type_int() }}) as quarter_number\n from\n fscl_year_dates_for_periods f\n\n),\nfscl_periods as (\n\n select\n f.date_day,\n f.fiscal_year_number,\n f.week_start_date,\n f.week_end_date,\n f.fiscal_week_of_year,\n case\n -- we move week 53 into the 3rd period of the quarter\n when f.fiscal_week_of_year = 53 then 3\n when f.w13_number between 0 and 3 then 1\n when f.w13_number between 4 and 8 then 2\n when f.w13_number between 9 and 12 then 3\n end as period_of_quarter,\n f.quarter_number\n from\n fscl_year_w13 f\n\n),\nfscl_periods_quarters as (\n\n select\n f.*,\n cast((\n (f.quarter_number * 3) + f.period_of_quarter\n ) as {{ dbt.type_int() }}) as fiscal_period_number\n from\n fscl_periods f\n\n)\nselect\n date_day,\n fiscal_year_number,\n week_start_date,\n week_end_date,\n fiscal_week_of_year,\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\n fiscal_period_number,\n quarter_number+1 as fiscal_quarter_number,\n period_of_quarter as fiscal_period_of_quarter\nfrom\n fscl_periods_quarters\norder by 1,2\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_fiscal_year_dates", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.876279, "supported_languages": null}, "macro.dbt_date.tomorrow": {"name": "tomorrow", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/tomorrow.sql", "original_file_path": "macros/calendar_date/tomorrow.sql", "unique_id": "macro.dbt_date.tomorrow", "macro_sql": "{%- macro tomorrow(date=None, tz=None) -%}\n{{ dbt_date.n_days_away(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.876489, "supported_languages": null}, "macro.dbt_date.next_week": {"name": "next_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_week.sql", "original_file_path": 
"macros/calendar_date/next_week.sql", "unique_id": "macro.dbt_date.next_week", "macro_sql": "{%- macro next_week(tz=None) -%}\n{{ dbt_date.n_weeks_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.876662, "supported_languages": null}, "macro.dbt_date.next_month_name": {"name": "next_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_name.sql", "original_file_path": "macros/calendar_date/next_month_name.sql", "unique_id": "macro.dbt_date.next_month_name", "macro_sql": "{%- macro next_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.876957, "supported_languages": null}, "macro.dbt_date.next_month": {"name": "next_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month.sql", "original_file_path": "macros/calendar_date/next_month.sql", "unique_id": "macro.dbt_date.next_month", "macro_sql": "{%- macro next_month(tz=None) -%}\n{{ dbt_date.n_months_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8771358, "supported_languages": null}, "macro.dbt_date.day_name": {"name": "day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.day_name", "macro_sql": "{%- macro day_name(date, short=True) -%}\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.878046, "supported_languages": null}, "macro.dbt_date.default__day_name": {"name": "default__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.default__day_name", "macro_sql": "\n\n{%- macro default__day_name(date, short) -%}\n{%- set f = 'Dy' if short else 'Day' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8782551, "supported_languages": null}, "macro.dbt_date.snowflake__day_name": {"name": "snowflake__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.snowflake__day_name", "macro_sql": "\n\n{%- macro snowflake__day_name(date, short) -%}\n {%- if short -%}\n dayname({{ date }})\n {%- else -%}\n -- long version not implemented on Snowflake so we're doing it manually :/\n case dayname({{ date }})\n when 'Mon' then 'Monday'\n when 'Tue' then 'Tuesday'\n when 'Wed' then 'Wednesday'\n 
when 'Thu' then 'Thursday'\n when 'Fri' then 'Friday'\n when 'Sat' then 'Saturday'\n when 'Sun' then 'Sunday'\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.87846, "supported_languages": null}, "macro.dbt_date.bigquery__day_name": {"name": "bigquery__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.bigquery__day_name", "macro_sql": "\n\n{%- macro bigquery__day_name(date, short) -%}\n{%- set f = '%a' if short else '%A' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.878648, "supported_languages": null}, "macro.dbt_date.postgres__day_name": {"name": "postgres__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.postgres__day_name", "macro_sql": "\n\n{%- macro postgres__day_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMDy' if short else 'FMDay' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.878887, "supported_languages": null}, "macro.dbt_date.duckdb__day_name": {"name": "duckdb__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.duckdb__day_name", "macro_sql": "\n\n{%- macro duckdb__day_name(date, short) -%}\n {%- if short -%}\n substr(dayname({{ date }}), 1, 3)\n {%- else -%}\n dayname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.879074, "supported_languages": null}, "macro.dbt_date.spark__day_name": {"name": "spark__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.spark__day_name", "macro_sql": "\n\n{%- macro spark__day_name(date, short) -%}\n{%- set f = 'E' if short else 'EEEE' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8792698, "supported_languages": null}, "macro.dbt_date.trino__day_name": {"name": "trino__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.trino__day_name", "macro_sql": "\n\n{%- macro trino__day_name(date, short) -%}\n{%- set f = 'a' if short else 'W' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1728513414.8794649, "supported_languages": null}, "macro.dbt_date.to_unixtimestamp": {"name": "to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.to_unixtimestamp", "macro_sql": "{%- macro to_unixtimestamp(timestamp) -%}\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__to_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.879862, "supported_languages": null}, "macro.dbt_date.default__to_unixtimestamp": {"name": "default__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__to_unixtimestamp", "macro_sql": "\n\n{%- macro default__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8799958, "supported_languages": null}, "macro.dbt_date.snowflake__to_unixtimestamp": {"name": "snowflake__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__to_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8801322, "supported_languages": null}, "macro.dbt_date.bigquery__to_unixtimestamp": {"name": "bigquery__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__to_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\n unix_seconds({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.880233, "supported_languages": null}, "macro.dbt_date.spark__to_unixtimestamp": {"name": "spark__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.spark__to_unixtimestamp", "macro_sql": "\n\n{%- macro spark__to_unixtimestamp(timestamp) -%}\n unix_timestamp({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.880333, "supported_languages": null}, "macro.dbt_date.trino__to_unixtimestamp": {"name": "trino__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": 
"macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__to_unixtimestamp", "macro_sql": "\n\n{%- macro trino__to_unixtimestamp(timestamp) -%}\n to_unixtime({{ timestamp }} AT TIME ZONE 'UTC')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8804321, "supported_languages": null}, "macro.dbt_date.n_days_away": {"name": "n_days_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_away.sql", "original_file_path": "macros/calendar_date/n_days_away.sql", "unique_id": "macro.dbt_date.n_days_away", "macro_sql": "{%- macro n_days_away(n, date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.880662, "supported_languages": null}, "macro.dbt_date.week_start": {"name": "week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.week_start", "macro_sql": "{%- macro week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.881141, "supported_languages": null}, "macro.dbt_date.default__week_start": {"name": "default__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.default__week_start", "macro_sql": "{%- macro default__week_start(date) -%}\ncast({{ dbt.date_trunc('week', date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.881282, "supported_languages": null}, "macro.dbt_date.snowflake__week_start": {"name": "snowflake__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.snowflake__week_start", "macro_sql": "\n\n{%- macro snowflake__week_start(date) -%}\n {#\n Get the day of week offset: e.g. 
if the date is a Sunday,\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\n #}\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.day_of_week", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8816268, "supported_languages": null}, "macro.dbt_date.postgres__week_start": {"name": "postgres__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.postgres__week_start", "macro_sql": "\n\n{%- macro postgres__week_start(date) -%}\n-- Sunday as week start date\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.881875, "supported_languages": null}, "macro.dbt_date.duckdb__week_start": {"name": "duckdb__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.duckdb__week_start", "macro_sql": "\n\n{%- macro duckdb__week_start(date) -%}\n{{ return(dbt_date.postgres__week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.882015, "supported_languages": null}, "macro.dbt_date.iso_week_start": {"name": "iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.iso_week_start", "macro_sql": "{%- macro iso_week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8826492, "supported_languages": null}, "macro.dbt_date._iso_week_start": {"name": "_iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date._iso_week_start", "macro_sql": "{%- macro _iso_week_start(date, week_type) -%}\ncast({{ dbt.date_trunc(week_type, date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8828, "supported_languages": null}, "macro.dbt_date.default__iso_week_start": {"name": "default__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", 
"unique_id": "macro.dbt_date.default__iso_week_start", "macro_sql": "\n\n{%- macro default__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.882934, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_start": {"name": "snowflake__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_start", "macro_sql": "\n\n{%- macro snowflake__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.883064, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_start": {"name": "postgres__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.postgres__iso_week_start", "macro_sql": "\n\n{%- macro postgres__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8831942, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_start": {"name": "duckdb__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_start", "macro_sql": "\n\n{%- macro duckdb__iso_week_start(date) -%}\n{{ return(dbt_date.postgres__iso_week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.88333, "supported_languages": null}, "macro.dbt_date.spark__iso_week_start": {"name": "spark__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.spark__iso_week_start", "macro_sql": "\n\n{%- macro spark__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8834848, "supported_languages": null}, "macro.dbt_date.trino__iso_week_start": {"name": "trino__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.trino__iso_week_start", "macro_sql": "\n\n{%- macro trino__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": 
{"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.883643, "supported_languages": null}, "macro.dbt_date.n_days_ago": {"name": "n_days_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_ago.sql", "original_file_path": "macros/calendar_date/n_days_ago.sql", "unique_id": "macro.dbt_date.n_days_ago", "macro_sql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{%- set n = n|int -%}\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.884068, "supported_languages": null}, "macro.dbt_date.last_week": {"name": "last_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_week.sql", "original_file_path": "macros/calendar_date/last_week.sql", "unique_id": "macro.dbt_date.last_week", "macro_sql": "{%- macro last_week(tz=None) -%}\n{{ dbt_date.n_weeks_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.884253, "supported_languages": null}, "macro.dbt_date.now": {"name": "now", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/now.sql", "original_file_path": "macros/calendar_date/now.sql", "unique_id": "macro.dbt_date.now", "macro_sql": "{%- macro now(tz=None) -%}\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.convert_timezone", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.884439, "supported_languages": null}, "macro.dbt_date.periods_since": {"name": "periods_since", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/periods_since.sql", "original_file_path": "macros/calendar_date/periods_since.sql", "unique_id": "macro.dbt_date.periods_since", "macro_sql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.884686, "supported_languages": null}, "macro.dbt_date.today": {"name": "today", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/today.sql", "original_file_path": "macros/calendar_date/today.sql", "unique_id": "macro.dbt_date.today", "macro_sql": "{%- macro today(tz=None) -%}\ncast({{ dbt_date.now(tz) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.884854, "supported_languages": null}, "macro.dbt_date.last_month": {"name": "last_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month.sql", "original_file_path": 
"macros/calendar_date/last_month.sql", "unique_id": "macro.dbt_date.last_month", "macro_sql": "{%- macro last_month(tz=None) -%}\n{{ dbt_date.n_months_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.885026, "supported_languages": null}, "macro.dbt_date.day_of_year": {"name": "day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.day_of_year", "macro_sql": "{%- macro day_of_year(date) -%}\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8854249, "supported_languages": null}, "macro.dbt_date.default__day_of_year": {"name": "default__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.default__day_of_year", "macro_sql": "\n\n{%- macro default__day_of_year(date) -%}\n {{ dbt_date.date_part('dayofyear', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.885561, "supported_languages": null}, "macro.dbt_date.postgres__day_of_year": {"name": "postgres__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.postgres__day_of_year", "macro_sql": "\n\n{%- macro postgres__day_of_year(date) -%}\n {{ dbt_date.date_part('doy', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.885694, "supported_languages": null}, "macro.dbt_date.redshift__day_of_year": {"name": "redshift__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.redshift__day_of_year", "macro_sql": "\n\n{%- macro redshift__day_of_year(date) -%}\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.885868, "supported_languages": null}, "macro.dbt_date.spark__day_of_year": {"name": "spark__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.spark__day_of_year", "macro_sql": "\n\n{%- macro spark__day_of_year(date) -%}\n dayofyear({{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1728513414.886038, "supported_languages": null}, "macro.dbt_date.trino__day_of_year": {"name": "trino__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.trino__day_of_year", "macro_sql": "\n\n{%- macro trino__day_of_year(date) -%}\n {{ dbt_date.date_part('day_of_year', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8861709, "supported_languages": null}, "macro.dbt_date.round_timestamp": {"name": "round_timestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/round_timestamp.sql", "original_file_path": "macros/calendar_date/round_timestamp.sql", "unique_id": "macro.dbt_date.round_timestamp", "macro_sql": "{% macro round_timestamp(timestamp) %}\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8863978, "supported_languages": null}, "macro.dbt_date.from_unixtimestamp": {"name": "from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.from_unixtimestamp", "macro_sql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__from_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.889029, "supported_languages": null}, "macro.dbt_date.default__from_unixtimestamp": {"name": "default__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__from_unixtimestamp", "macro_sql": "\n\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp({{ epochs }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.889277, "supported_languages": null}, "macro.dbt_date.postgres__from_unixtimestamp": {"name": "postgres__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.postgres__from_unixtimestamp", "macro_sql": "\n\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n 
cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.889569, "supported_languages": null}, "macro.dbt_date.snowflake__from_unixtimestamp": {"name": "snowflake__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__from_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n {%- set scale = 0 -%}\n {%- elif format == \"milliseconds\" -%}\n {%- set scale = 3 -%}\n {%- elif format == \"microseconds\" -%}\n {%- set scale = 6 -%}\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp_ntz({{ epochs }}, {{ scale }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.890081, "supported_languages": null}, "macro.dbt_date.bigquery__from_unixtimestamp": {"name": "bigquery__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__from_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n timestamp_seconds({{ epochs }})\n {%- elif format == \"milliseconds\" -%}\n timestamp_millis({{ epochs }})\n {%- elif format == \"microseconds\" -%}\n timestamp_micros({{ epochs }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8904572, "supported_languages": null}, "macro.dbt_date.trino__from_unixtimestamp": {"name": "trino__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__from_unixtimestamp", "macro_sql": "\n\n{%- macro trino__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n cast(from_unixtime({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"milliseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 6)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"microseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 3)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"nanoseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.891026, "supported_languages": null}, "macro.dbt_date.duckdb__from_unixtimestamp": {"name": "duckdb__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.duckdb__from_unixtimestamp", "macro_sql": "\n\n\n{%- macro duckdb__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.891274, "supported_languages": null}, "macro.dbt_date.n_months_ago": {"name": "n_months_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_ago.sql", "original_file_path": "macros/calendar_date/n_months_ago.sql", "unique_id": "macro.dbt_date.n_months_ago", "macro_sql": "{%- macro n_months_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8916311, "supported_languages": null}, "macro.dbt_date.date_part": {"name": "date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.date_part", "macro_sql": "{% macro date_part(datepart, date) -%}\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8919628, "supported_languages": null}, "macro.dbt_date.default__date_part": {"name": "default__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.default__date_part", "macro_sql": "{% macro default__date_part(datepart, date) -%}\n date_part('{{ datepart }}', {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.89209, "supported_languages": null}, "macro.dbt_date.bigquery__date_part": {"name": "bigquery__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.bigquery__date_part", "macro_sql": "{% macro bigquery__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.892214, 
"supported_languages": null}, "macro.dbt_date.trino__date_part": {"name": "trino__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.trino__date_part", "macro_sql": "{% macro trino__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8923368, "supported_languages": null}, "macro.dbt_date.n_weeks_away": {"name": "n_weeks_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_away.sql", "original_file_path": "macros/calendar_date/n_weeks_away.sql", "unique_id": "macro.dbt_date.n_weeks_away", "macro_sql": "{%- macro n_weeks_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.892663, "supported_languages": null}, "macro.dbt_date.day_of_month": {"name": "day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.day_of_month", "macro_sql": "{%- macro day_of_month(date) -%}\n{{ dbt_date.date_part('day', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.892865, "supported_languages": null}, "macro.dbt_date.redshift__day_of_month": {"name": "redshift__day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.redshift__day_of_month", "macro_sql": "\n\n{%- macro redshift__day_of_month(date) -%}\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.893039, "supported_languages": null}, "macro.dbt_date.yesterday": {"name": "yesterday", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/yesterday.sql", "original_file_path": "macros/calendar_date/yesterday.sql", "unique_id": "macro.dbt_date.yesterday", "macro_sql": "{%- macro yesterday(date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8932421, "supported_languages": null}, "macro.dbt_date.day_of_week": {"name": "day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.day_of_week", "macro_sql": "{%- macro 
day_of_week(date, isoweek=true) -%}\n{{ adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8961349, "supported_languages": null}, "macro.dbt_date.default__day_of_week": {"name": "default__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.default__day_of_week", "macro_sql": "\n\n{%- macro default__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else {{ dow }}\n end\n {%- else -%}\n {{ dow }} + 1\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.896426, "supported_languages": null}, "macro.dbt_date.snowflake__day_of_week": {"name": "snowflake__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.snowflake__day_of_week", "macro_sql": "\n\n{%- macro snowflake__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'dayofweekiso' -%}\n {{ dbt_date.date_part(dow_part, date) }}\n {%- else -%}\n {%- set dow_part = 'dayofweek' -%}\n case\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\n else {{ dbt_date.date_part(dow_part, date) }} + 1\n end\n {%- endif -%}\n\n\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8968132, "supported_languages": null}, "macro.dbt_date.bigquery__day_of_week": {"name": "bigquery__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.bigquery__day_of_week", "macro_sql": "\n\n{%- macro bigquery__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (1) to Monday (2)\n when {{ dow }} = 1 then 7\n else {{ dow }} - 1\n end\n {%- else -%}\n {{ dow }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.89709, "supported_languages": null}, "macro.dbt_date.postgres__day_of_week": {"name": "postgres__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.postgres__day_of_week", "macro_sql": "\n\n\n{%- macro postgres__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'isodow' -%}\n -- Monday(1) to Sunday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\n {%- else -%}\n {%- set dow_part = 'dow' 
-%}\n -- Sunday(1) to Saturday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.897486, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.897839, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.898004, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.898208, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.898474, "supported_languages": null}, 
"macro.dbt_date.iso_week_end": {"name": "iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8989172, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.8991182, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.899254, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.899387, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.89974, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.900453, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.900643, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9008389, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.901027, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.901222, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro 
duckdb__month_name(date, short) -%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9013991, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9016802, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9019089, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9021802, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.902611, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.902784, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9029272, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9030259, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.904261, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.904453, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": 
"macro.dbt_date.bigquery__convert_timezone", "macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.904597, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9048152, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9049962, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9051719, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.905329, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9055638, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.905896, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.906583, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.906762, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.906893, 
"supported_languages": null}, "macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.90703, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.907237, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.907377, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.907512, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.907669, "supported_languages": null}, 
"macro.dbt_date.week_end": {"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.908223, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.90835, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9085388, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.908731, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9088662, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": 
"macros/calendar_date/next_month_number.sql", "unique_id": "macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.909072, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9092731, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9096441, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9105809, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.910744, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": 
"{% macro redshift__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.910903, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.911059, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9111931, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.91135, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1728513414.912158, "supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.912759, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.914172, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.914411, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.914656, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not 
loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.914888, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.915114, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.915367, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.915623, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9158468, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9161642, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9162638, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.91637, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.916466, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9168131, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9174862, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", 
"original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.918636, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.91918, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.919976, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) 
}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.920481, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.920613, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.920743, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.920867, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9209962, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.923962, 
"supported_languages": null}, "macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.924117, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9242702, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.924416, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', 
({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.951587, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.952576, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.952715, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.952982, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", 
"original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.953259, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.953388, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.953517, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9536378, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.953762, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 
'fivetran_utils') (union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.954242, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.954786, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.955271, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.955466, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} 
ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.955678, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9559228, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.957143, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.961001, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.961347, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.961704, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9633641, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.963902, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.964522, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9646761, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.964829, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9649951, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.965141, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.965285, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.966109, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.967333, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9680378, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.968222, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.968395, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.968606, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9687839, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.968962, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.969217, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9693198, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9694211, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9700348, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.971413, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.971585, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.972382, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9760199, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.98049, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9819, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.982234, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9823842, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9825752, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.postgres__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.982904, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9830122, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.98312, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9832199, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", 
"unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.983319, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.983568, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.983667, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.983763, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.984144, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(False) 
}}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.984527, "supported_languages": null}, "macro.zendesk.regex_extract": {"name": "regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.regex_extract", "macro_sql": "{% macro regex_extract(string, day) -%}\n\n{{ adapter.dispatch('regex_extract', 'zendesk') (string, day) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.postgres__regex_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.98535, "supported_languages": null}, "macro.zendesk.default__regex_extract": {"name": "default__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.default__regex_extract", "macro_sql": "{% macro default__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9855518, "supported_languages": null}, "macro.zendesk.bigquery__regex_extract": {"name": "bigquery__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.bigquery__regex_extract", "macro_sql": "{% macro bigquery__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n regexp_extract({{ string }}, {{ regex }} )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.98574, "supported_languages": null}, "macro.zendesk.snowflake__regex_extract": {"name": "snowflake__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.snowflake__regex_extract", "macro_sql": "{% macro snowflake__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n REGEXP_SUBSTR({{ string }}, {{ regex }}, 1, 1, 'e', 1 )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.985999, "supported_languages": null}, "macro.zendesk.postgres__regex_extract": {"name": "postgres__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.postgres__regex_extract", "macro_sql": "{% macro postgres__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" %}\n\n (regexp_matches({{ string }}, {{ regex }}))[1]\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.986191, "supported_languages": null}, "macro.zendesk.redshift__regex_extract": {"name": 
"redshift__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.redshift__regex_extract", "macro_sql": "{% macro redshift__regex_extract(string, day) %}\n\n {% set regex = '\"' ~ day ~ '\"' ~ ':\\\\\\{([^\\\\\\}]*)\\\\\\}' -%}\n\n '{' || REGEXP_SUBSTR({{ string }}, '{{ regex }}', 1, 1, 'e') || '}'\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.986428, "supported_languages": null}, "macro.zendesk.spark__regex_extract": {"name": "spark__regex_extract", "resource_type": "macro", "package_name": "zendesk", "path": "macros/regex_extract.sql", "original_file_path": "macros/regex_extract.sql", "unique_id": "macro.zendesk.spark__regex_extract", "macro_sql": "{% macro spark__regex_extract(string, day) %}\n {% set regex = \"'.*?\" ~ day ~ \".*?({.*?})'\" | replace(\"{\", \"\\\\\\{\") | replace(\"}\", \"\\\\\\}\") %}\n regexp_extract({{ string }}, {{ regex }}, 1)\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.986756, "supported_languages": null}, "macro.zendesk.coalesce_cast": {"name": "coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.coalesce_cast", "macro_sql": "{% macro coalesce_cast(column_list, datatype) -%}\n {{ return(adapter.dispatch('coalesce_cast', 'zendesk')(column_list, datatype)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__coalesce_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.987094, "supported_languages": null}, "macro.zendesk.default__coalesce_cast": {"name": "default__coalesce_cast", "resource_type": "macro", "package_name": "zendesk", "path": "macros/coalesce_cast.sql", "original_file_path": "macros/coalesce_cast.sql", "unique_id": "macro.zendesk.default__coalesce_cast", "macro_sql": "{% macro default__coalesce_cast(column_list, datatype) %}\n coalesce(\n {%- for column in column_list %}\n cast({{ column }} as {{ datatype }})\n {%- if not loop.last -%},{%- endif -%}\n {% endfor %}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.987339, "supported_languages": null}, "macro.zendesk.clean_schedule": {"name": "clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", "original_file_path": "macros/clean_schedule.sql", "unique_id": "macro.zendesk.clean_schedule", "macro_sql": "{% macro clean_schedule(column_name) -%}\n {{ return(adapter.dispatch('clean_schedule', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__clean_schedule"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9875998, "supported_languages": null}, "macro.zendesk.default__clean_schedule": {"name": "default__clean_schedule", "resource_type": "macro", "package_name": "zendesk", "path": "macros/clean_schedule.sql", "original_file_path": 
"macros/clean_schedule.sql", "unique_id": "macro.zendesk.default__clean_schedule", "macro_sql": "{% macro default__clean_schedule(column_name) -%}\n replace(replace(replace(replace(cast({{ column_name }} as {{ dbt.type_string() }}), '{', ''), '}', ''), '\"', ''), ' ', '')\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.987741, "supported_languages": null}, "macro.zendesk.count_tokens": {"name": "count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.count_tokens", "macro_sql": "{% macro count_tokens(column_name) -%}\n {{ return(adapter.dispatch('count_tokens', 'zendesk')(column_name)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.zendesk.default__count_tokens"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.987991, "supported_languages": null}, "macro.zendesk.default__count_tokens": {"name": "default__count_tokens", "resource_type": "macro", "package_name": "zendesk", "path": "macros/count_tokens.sql", "original_file_path": "macros/count_tokens.sql", "unique_id": "macro.zendesk.default__count_tokens", "macro_sql": "{% macro default__count_tokens(column_name) %}\n {{ dbt.length(column_name) }} / 4 -- 1 token is approximately 4 characters, and we only need an approximation here.\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.988121, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.988677, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": 
True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.989685, "supported_languages": null}, "macro.zendesk_source.get_audit_log_columns": {"name": "get_audit_log_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_audit_log_columns.sql", "original_file_path": "macros/get_audit_log_columns.sql", "unique_id": "macro.zendesk_source.get_audit_log_columns", "macro_sql": "{% macro get_audit_log_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"action\", \"datatype\": dbt.type_string()},\n {\"name\": \"actor_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"change_description\", \"datatype\": dbt.type_string()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"source_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"source_label\", \"datatype\": dbt.type_string()},\n {\"name\": \"source_type\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.990696, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.991679, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": 
\"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9928489, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9935858, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.994056, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": {"name": "get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), 
\"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.995138, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.996149, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.996963, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n 
{\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513414.9977732, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.001319, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": "macros/get_ticket_columns.sql", "unique_id": 
"macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.006272, "supported_languages": null}, "macro.zendesk_source.get_ticket_field_history_columns": {"name": 
"get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.0070348, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.007586, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_tickets\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1728513415.00893, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"_fivetran_deleted\", \"datatype\": dbt.type_boolean()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_boolean", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.011038, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_file_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", 
\"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1728513415.013185, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.3847082, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "postgres", "schema": 
"zz_zendesk_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "b30a06ff7e3d392b2fdfa6b5f34633f6c7f8e018e31eef64fcdf2eeaffcae18a"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.40037, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\ndev as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n {{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policy_count_exclusion_tickets',[]) }}\n group by 1\n),\n\nfinal as (\n select \n prod.ticket_id as prod_ticket_id,\n dev.ticket_id as dev_ticket_id,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "bdad5490a4a975665c4b658101726f92c08755dd96f6372d8606b47e60fe29d4"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": 
"", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.403881, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select \n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n round(sla_elapsed_time, -1) as sla_elapsed_time, --round to the nearest tens\n is_active_sla,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nprod_not_in_dev as (\n -- rows from prod not found in dev\n select * from prod\n except distinct\n select * from dev\n),\n\ndev_not_in_prod as (\n -- rows from dev not found in prod\n select * from dev\n except distinct\n select * from prod\n),\n\nfinal as (\n select\n *,\n 'from prod' as source\n from prod_not_in_dev\n\n union all -- union since we only care if rows are produced\n\n select\n *,\n 'from dev' as source\n from dev_not_in_prod\n)\n\nselect *\nfrom final\n{{ \"where ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.metrics_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "metrics_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/metrics_count_match.sql", "original_file_path": "tests/integrity/metrics_count_match.sql", "unique_id": "test.zendesk_integration_tests.metrics_count_match", "fqn": ["zendesk_integration_tests", "integrity", "metrics_count_match"], "alias": "metrics_count_match", "checksum": {"name": "sha256", "checksum": "a1b9b09d680906335f534a5707924cdd7975615c0f3192a51e790183e4625724"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.4075599, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- check that all the tickets are accounted for in the metrics\nwith stg_count as (\n select\n count(*) as stg_ticket_count\n from {{ ref('stg_zendesk__ticket') }}\n),\n\nmetric_count as (\n select\n count(*) as metric_ticket_count\n from {{ ref('zendesk__ticket_metrics') }}\n)\n\nselect *\nfrom 
stg_count\njoin metric_count\n on stg_ticket_count != metric_ticket_count", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_metrics_parity": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_metrics_parity", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_metrics_parity.sql", "original_file_path": "tests/integrity/sla_metrics_parity.sql", "unique_id": "test.zendesk_integration_tests.sla_metrics_parity", "fqn": ["zendesk_integration_tests", "integrity", "sla_metrics_parity"], "alias": "sla_metrics_parity", "checksum": {"name": "sha256", "checksum": "d18407ef45d1ce6b2d4eeaca9286dfb8b3b1db85021e3fd69701fb0c33138675"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.410409, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n/*\nThis test is to ensure the sla_elapsed_time from zendesk__sla_policies matches the corresponding time in zendesk__ticket_metrics.\n*/\n\nwith dev_slas as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n where in_business_hours\n\n), dev_metrics as (\n select *\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n\n), dev_compare as (\n select \n dev_slas.ticket_id,\n dev_slas.metric,\n cast(dev_slas.sla_elapsed_time as {{ dbt.type_int() }}) as time_from_slas,\n case when metric = 'agent_work_time' then dev_metrics.agent_work_time_in_business_minutes\n when metric = 'requester_wait_time' then dev_metrics.requester_wait_time_in_business_minutes\n when metric = 'first_reply_time' then dev_metrics.first_reply_time_business_minutes\n end as time_from_metrics\n from dev_slas\n left join dev_metrics\n on dev_metrics.ticket_id = dev_slas.ticket_id\n)\n\nselect *\nfrom dev_compare\nwhere abs(time_from_slas - time_from_metrics) >= 5\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_metric_parity_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": 
"test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.414742, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n ticket_metrics.first_reply_time_business_minutes,\n sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "postgres", "schema": "zz_zendesk_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1728513415.4179258, "relation_name": null, "raw_code": "{{ config(\n 
tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728513415.4905741, "relation_name": "\"postgres\".\"zz_zendesk\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], 
"seed.zendesk_integration_tests.brand_data": [{"database": "postgres", "schema": "zz_zendesk", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1728513415.496682, "relation_name": "\"postgres\".\"zz_zendesk\".\"brand_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' 
}}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728513415.499111, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "postgres", "schema": "zz_zendesk", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1728513415.501477, "relation_name": "\"postgres\".\"zz_zendesk\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/catherinefritz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", 
"model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", 
"model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", "source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.zendesk__document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped", "model.zendesk.int_zendesk__ticket_document"], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.int_zendesk__ticket_comment_document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": 
["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_history", "model.zendesk.int_zendesk__timezone_daylight", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], 
"model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["source.zendesk_source.zendesk.audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], 
"test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "source.zendesk_source.zendesk.audit_log": [], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], "source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": [], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.audit_log_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], 
"seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": ["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__calendar_spine": 
["model.zendesk.int_zendesk__field_calendar_spine"], "model.zendesk.int_zendesk__timezone_daylight": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.zendesk__document": [], "model.zendesk.int_zendesk__ticket_comment_documents_grouped": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__ticket_comment_document": ["model.zendesk.int_zendesk__ticket_comment_documents_grouped"], "model.zendesk.int_zendesk__ticket_document": ["model.zendesk.zendesk__document"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__schedule_timezones": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__ticket_historical_group": 
["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__schedule_history": ["model.zendesk.int_zendesk__schedule_timezones"], "model.zendesk.int_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_holiday", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__timezone_daylight", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_holiday", "model.zendesk.int_zendesk__schedule_timezones", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__ticket_comment_document", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", 
"test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__audit_log": ["model.zendesk.int_zendesk__schedule_history"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__calendar_spine", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_document", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__audit_log_tmp": ["model.zendesk_source.stg_zendesk__audit_log"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": 
["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "source.zendesk_source.zendesk.audit_log": ["model.zendesk_source.stg_zendesk__audit_log_tmp"], "source.zendesk_source.zendesk.ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], 
"source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}, "unit_tests": {}} \ No newline at end of file diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 7820394d..56b2fff4 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -29,6 +29,7 @@ vars: ## Uncomment for docs generation # zendesk__unstructured_enabled: True + # using_schedule_histories: True ## For validation testing. To be commented out before release. # zendesk_schema: zendesk_test_env diff --git a/models/intermediate/int_zendesk__schedule_history.sql b/models/intermediate/int_zendesk__schedule_history.sql index 8b531f4c..f52a6613 100644 --- a/models/intermediate/int_zendesk__schedule_history.sql +++ b/models/intermediate/int_zendesk__schedule_history.sql @@ -1,4 +1,4 @@ -{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_histories'])) }} +{{ config(enabled=var('using_schedules', True) and var('using_schedule_histories', False)) }} with audit_logs as ( select diff --git a/models/intermediate/int_zendesk__schedule_holiday.sql b/models/intermediate/int_zendesk__schedule_holiday.sql index bbc064f5..ec265776 100644 --- a/models/intermediate/int_zendesk__schedule_holiday.sql +++ b/models/intermediate/int_zendesk__schedule_holiday.sql @@ -1,4 +1,4 @@ -{{ config(enabled=fivetran_utils.enabled_vars(['using_schedules','using_schedule_holidays'])) }} +{{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }} /* The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql index 18854bbd..3444cfb8 100644 --- a/models/intermediate/int_zendesk__schedule_spine.sql +++ b/models/intermediate/int_zendesk__schedule_spine.sql @@ -55,7 +55,7 @@ with schedule_timezones as ( count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index from join_holidays --- Label the partition start and add a row for to account for the partition end if there are multiple valid periods. +-- Label the partition start and add a row to account for the partition end if there are multiple valid periods. 
diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql
index 18854bbd..3444cfb8 100644
--- a/models/intermediate/int_zendesk__schedule_spine.sql
+++ b/models/intermediate/int_zendesk__schedule_spine.sql
@@ -55,7 +55,7 @@ with schedule_timezones as (
         count(*) over (partition by schedule_id, start_time_utc, schedule_valid_from) as max_valid_from_index
     from join_holidays
 
--- Label the partition start and add a row for to account for the partition end if there are multiple valid periods.
+-- Label the partition start and add a row to account for the partition end if there are multiple valid periods.
 ), add_partition_end_row as(
 select
     schedule_id,
diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql
index 5ada302e..29051827 100644
--- a/models/intermediate/int_zendesk__schedule_timezones.sql
+++ b/models/intermediate/int_zendesk__schedule_timezones.sql
@@ -56,7 +56,7 @@ with split_timezones as (
         lower(time_zone) as time_zone,
         schedule_name,
         cast(null as date) as valid_from, -- created_at is when the schedule was first ever created, so we'll fill this value later
-        cast({{ dbt.current_timestamp() }} as date) as valid_until,
+        cast({{ dbt.dateadd('day', 7, dbt.current_timestamp()) }} as date) as valid_until,
         False as is_historical
     from schedule
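For context on the `valid_until` hunk just above: `dbt.dateadd` and `dbt.current_timestamp` are cross-database macros, so the compiled SQL varies by adapter. On Postgres, for instance, the new line should render roughly as follows (an approximation of the default macro expansion, not output captured from this patch):

```sql
-- Approximate Postgres compilation: the fallback valid_until now lands
-- 7 days past the run timestamp instead of on the run date itself.
cast(now() + ((interval '1 day') * (7)) as date) as valid_until,
```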
From 855dc77fe8957dedc51651e821e4719dc3f86fe9 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Wed, 9 Oct 2024 17:52:49 -0500
Subject: [PATCH 72/76] address review comments

---
 models/intermediate/int_zendesk__schedule_timezones.sql | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/models/intermediate/int_zendesk__schedule_timezones.sql b/models/intermediate/int_zendesk__schedule_timezones.sql
index 29051827..9f7eaca4 100644
--- a/models/intermediate/int_zendesk__schedule_timezones.sql
+++ b/models/intermediate/int_zendesk__schedule_timezones.sql
@@ -10,7 +10,7 @@ with split_timezones as (
         max(created_at) over (partition by schedule_id) as max_created_at
     from {{ var('schedule') }}
 
-{% if var('using_schedule_histories', True) %}
+{% if var('using_schedule_histories', False) %}
 ), schedule_history as (
     select *
     from {{ ref('int_zendesk__schedule_history') }}

From 73845a9cf09757055b484b7129fc21a9620b4918 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Thu, 10 Oct 2024 13:01:48 -0500
Subject: [PATCH 73/76] Apply suggestions from code review

Co-authored-by: Joe Markiewicz <74217849+fivetran-joemarkiewicz@users.noreply.github.com>
---
 CHANGELOG.md | 10 +++++++---
 README.md    |  4 ++--
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index c6179ad4..151fbe54 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,13 +3,13 @@
 
 ## Breaking Changes (Full refresh required after upgrading)
 ### Schedule Change Support
-- Support for schedule changes has been added. This feature is disabled by default, but can be enabled by setting variable `using_schedule_histories` to `true` in `dbt_project.yml`:
+- Support for schedule changes has been added. This feature is disabled by default, but can be enabled by setting variable `using_schedule_histories` to `true` in your `dbt_project.yml`:
 ```yml
 vars:
     using_schedule_histories: true
 ```
-  - Schedule changes can now extracted directly from the audit log, providing a view of schedule modifications over time.
-  - The `int_zendesk__schedule_spine` model is now incorporates these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data.
+  - Schedule changes can now be extracted directly from the audit log, providing a view of schedule modifications over time.
+  - The `int_zendesk__schedule_spine` model is now able to incorporate these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data.
   - Note this is only in effect when `using_schedule_histories` is true.
 - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis.
 
 ### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
@@ -20,9 +20,13 @@ vars:
 - Holiday support: Users can now choose to disable holiday tracking by setting variable `using_holidays` to `false` in `dbt_project.yml`.
 - New intermediate models have been introduced to streamline both the readability and maintainability:
   - `int_zendesk__timezone_daylight`: A utility model that maintains a record of daylight savings adjustments for each time zone.
+    - materialization: ephemeral
   - `int_zendesk__schedule_history`: Captures a full history of schedule changes for each `schedule_id`.
+    - materialization: table (if enabled)
   - `int_zendesk__schedule_timezones`: Merges schedule history with time zone shifts.
+    - materialization: ephemeral
   - `int_zendesk__schedule_holidays`: Identifies and calculates holiday periods for each schedule.
+    - materialization: ephemeral
 - Rebuilt logic in `int_zendesk__schedule_spine` to consolidate updates from the new intermediate models.
 ### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
 - Updated the `stg_zendesk__schedule_holidays` model to allow users to disable holiday processing by setting variable `using_holidays` to `false`.
diff --git a/README.md b/README.md
index 116c4f79..b0847d7b 100644
--- a/README.md
+++ b/README.md
@@ -80,11 +80,11 @@ vars:
 ```
 
 ### Step 4: Enable/Disable models for non-existent sources
-This package takes into consideration that not every Zendesk Support account utilizes the `schedule`, `schedule_holiday`, `audit_log`, `domain_name`, `user_tag`, `organization_tag`, or `ticket_form_history` features, and allows you to disable the corresponding functionality. By default, all variables' values are assumed to be `true`, except for `using_schedule_histories`. Add variables for only the tables you want to enable/disable:
+This package takes into consideration that not every Zendesk Support account utilizes the `schedule`, `schedule_holiday`, `ticket_schedule`, `daylight_time`, `time_zone`, `audit_log`, `domain_name`, `user_tag`, `organization_tag`, or `ticket_form_history` features, and allows you to disable the corresponding functionality. By default, all variables' values are assumed to be `true`, except for `using_schedule_histories`. Add variables for only the tables you want to enable/disable:
 ```yml
 vars:
     using_schedule_histories: True #Enable if you are using audit_logs for schedule histories
-    using_schedules: False #Disable if you are not using schedules
+    using_schedules: False #Disable if you are not using schedules, which requires source tables `ticket_schedule`, `daylight_time`, and `time_zone`
     using_holidays: False #Disable if you are not using schedule_holidays for holidays
     using_domain_names: False #Disable if you are not using domain names
     using_user_tags: False #Disable if you are not using user tags
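Because the changelog and README hunks above both hinge on `using_schedule_histories` gating models in and out of the DAG, a quick way to confirm the gating locally is `dbt ls`; this is a hypothetical spot-check, not a command introduced by the patch:

```sh
# With the variable unset it defaults to false, so the history model
# should be compiled out of the project and return no results:
dbt ls --select int_zendesk__schedule_history

# With the variable enabled, the model should appear in the selection:
dbt ls --select int_zendesk__schedule_history --vars '{using_schedule_histories: true}'
```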
From fd879c5c37ed2b3dc90242e0611789bab24f3dd3 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Thu, 10 Oct 2024 14:48:16 -0500
Subject: [PATCH 74/76] address review comments

---
 .buildkite/scripts/run_models.sh |  3 ++-
 CHANGELOG.md                     | 19 +++++++++----------
 README.md                        |  2 +-
 3 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/.buildkite/scripts/run_models.sh b/.buildkite/scripts/run_models.sh
index eb16fa9a..e394504b 100644
--- a/.buildkite/scripts/run_models.sh
+++ b/.buildkite/scripts/run_models.sh
@@ -20,10 +20,11 @@ dbt seed --target "$db" --full-refresh
 dbt run --target "$db" --full-refresh
 dbt run --target "$db"
 dbt test --target "$db"
-dbt run --vars '{using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh
+dbt run --vars '{zendesk__unstructured_enabled: true, using_schedules: false, using_domain_names: false, using_user_tags: false, using_ticket_form_history: false, using_organization_tags: false}' --target "$db" --full-refresh
 dbt test --target "$db"
 dbt run --vars '{using_schedule_histories: true, using_holidays: false}' --target "$db" --full-refresh
 dbt test --target "$db"
+dbt run --vars '{using_schedule_histories: true, using_holidays: true}' --target "$db" --full-refresh
 dbt test --target "$db"
 
 dbt run-operation fivetran_utils.drop_schemas_automation --target "$db"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 151fbe54..17154a51 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,7 +3,7 @@
 
 ## Breaking Changes (Full refresh required after upgrading)
 ### Schedule Change Support
-- Support for schedule changes has been added. This feature is disabled by default, but can be enabled by setting variable `using_schedule_histories` to `true` in your `dbt_project.yml`:
+- Support for schedule changes has been added. This feature is disabled by default since most users do not sync the required `audit_log` source table. To enable this feature, set the variable `using_schedule_histories` to `true` in your `dbt_project.yml`:
 ```yml
 vars:
     using_schedule_histories: true
@@ -12,25 +12,24 @@ vars:
   - Schedule changes can now be extracted directly from the audit log, providing a view of schedule modifications over time.
   - The `int_zendesk__schedule_spine` model is now able to incorporate these schedule changes, making it possible for downstream models to reflect the most up-to-date schedule data.
   - Note this is only in effect when `using_schedule_histories` is true.
 - This improves granularity for Zendesk metrics related to agent availability, SLA tracking, and time-based performance analysis.
-### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
+#### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
 - Introduced the `stg_zendesk__audit_log` table for capturing schedule changes from Zendesk's audit log.
   - This model is disabled by default, to enable it set variable `using_schedule_histories` to `true` in `dbt_project.yml`.
 
 ## New Features
-- Holiday support: Users can now choose to disable holiday tracking by setting variable `using_holidays` to `false` in `dbt_project.yml`.
+- Holiday support: Users can now choose to disable holiday tracking, while continuing to use schedules, by setting variable `using_holidays` to `false` in `dbt_project.yml`.
 - New intermediate models have been introduced to streamline both the readability and maintainability:
-  - `int_zendesk__timezone_daylight`: A utility model that maintains a record of daylight savings adjustments for each time zone.
+  - [`int_zendesk__timezone_daylight`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__timezone_daylight.sql): A utility model that maintains a record of daylight savings adjustments for each time zone.
     - materialization: ephemeral
-  - `int_zendesk__schedule_history`: Captures a full history of schedule changes for each `schedule_id`.
+  - [`int_zendesk__schedule_history`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__schedule_history.sql): Captures a full history of schedule changes for each `schedule_id`.
     - materialization: table (if enabled)
-  - `int_zendesk__schedule_timezones`: Merges schedule history with time zone shifts.
+  - [`int_zendesk__schedule_timezones`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__schedule_timezones.sql): Merges schedule history with time zone shifts.
    - materialization: ephemeral
-  - `int_zendesk__schedule_holidays`: Identifies and calculates holiday periods for each schedule.
+  - [`int_zendesk__schedule_holiday`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__schedule_holiday.sql): Identifies and calculates holiday periods for each schedule.
     - materialization: ephemeral
-- Rebuilt logic in `int_zendesk__schedule_spine` to consolidate updates from the new intermediate models.
-### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
+- Rebuilt logic in [`int_zendesk__schedule_spine`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__schedule_spine.sql) to consolidate updates from the new intermediate models.
+#### dbt_zendesk_source changes (see the [Release Notes](https://github.com/fivetran/dbt_zendesk_source/releases/tag/v0.13.0) for more details)
 - Updated the `stg_zendesk__schedule_holidays` model to allow users to disable holiday processing by setting variable `using_holidays` to `false`.
-- Added field-level documentation for the `stg_zendesk__audit_log` table.
 
 ## Bug Fixes
 - Resolved a bug in the `int_zendesk__schedule_spine` model where users experienced large gaps in non-holiday periods. The updated logic addresses this issue.
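The materialization notes added to the changelog above correspond to ordinary dbt model configs. As an illustration of the mechanism only (the package defines these itself; the paths and values below just mirror the changelog's descriptions), a consuming project could override them from its own `dbt_project.yml`:

```yml
# Hypothetical override in a consuming project -- not part of this patch.
models:
  zendesk:
    intermediate:
      int_zendesk__schedule_history:
        +materialized: table
      int_zendesk__schedule_timezones:
        +materialized: ephemeral
```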
diff --git a/README.md b/README.md
index b0847d7b..96c242f3 100644
--- a/README.md
+++ b/README.md
@@ -84,7 +84,7 @@ This package takes into consideration that not every Zendesk Support account uti
 ```yml
 vars:
     using_schedule_histories: True #Enable if you are using audit_logs for schedule histories
-    using_schedules: False #Disable if you are not using schedules, which requires source tables `ticket_schedule`, `daylight_time`, and `time_zone`
+    using_schedules: False #Disable if you are not using schedules, which requires source tables ticket_schedule, daylight_time, and time_zone
     using_holidays: False #Disable if you are not using schedule_holidays for holidays
     using_domain_names: False #Disable if you are not using domain names
     using_user_tags: False #Disable if you are not using user tags

From 360911f3c5ddc86877e01da18f5c2d6f1d1b3db7 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Thu, 10 Oct 2024 15:27:40 -0500
Subject: [PATCH 75/76] Update packages.yml

---
 packages.yml | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/packages.yml b/packages.yml
index efe428a1..849ed121 100644
--- a/packages.yml
+++ b/packages.yml
@@ -1,8 +1,5 @@
 packages:
-  # - package: fivetran/zendesk_source
-  #   version: [">=0.12.0", "<0.13.0"]
-  - git: https://github.com/fivetran/dbt_zendesk_source.git
-    revision: feature/historical-schedules
-    warn-unpinned: false
+  - package: fivetran/zendesk_source
+    version: [">=0.13.0", "<0.14.0"]
   - package: calogica/dbt_date
     version: [">=0.9.0", "<1.0.0"]
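After `packages.yml` is repointed from the feature branch back to the hub release range above, previously installed packages need re-resolving; a typical local refresh (hypothetical, not part of the patch) would be:

```sh
# dbt clean removes the folders listed under clean-targets (conventionally
# target/ and dbt_packages/), then dbt deps reinstalls fivetran/zendesk_source
# at the highest version satisfying [">=0.13.0", "<0.14.0"].
dbt clean
dbt deps
```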
From 56cd177ac13dc7cb89c5408c4efa711795571b64 Mon Sep 17 00:00:00 2001
From: fivetran-catfritz <111930712+fivetran-catfritz@users.noreply.github.com>
Date: Thu, 10 Oct 2024 15:55:11 -0500
Subject: [PATCH 76/76] release review updates

---
 CHANGELOG.md                                          | 2 +-
 models/intermediate/int_zendesk__schedule_holiday.sql | 7 -------
 models/intermediate/int_zendesk__schedule_spine.sql   | 4 ++--
 3 files changed, 3 insertions(+), 10 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 17154a51..e5b53d17 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,7 +19,7 @@ vars:
 ## New Features
 - Holiday support: Users can now choose to disable holiday tracking, while continuing to use schedules, by setting variable `using_holidays` to `false` in `dbt_project.yml`.
 - New intermediate models have been introduced to streamline both the readability and maintainability:
-  - [`int_zendesk__timezone_daylight`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__timezone_daylight.sql): A utility model that maintains a record of daylight savings adjustments for each time zone.
+  - [`int_zendesk__timezone_daylight`](https://github.com/fivetran/dbt_zendesk/blob/main/models/utils/int_zendesk__timezone_daylight.sql): A utility model that maintains a record of daylight savings adjustments for each time zone.
     - materialization: ephemeral
   - [`int_zendesk__schedule_history`](https://github.com/fivetran/dbt_zendesk/blob/main/models/intermediate/int_zendesk__schedule_history.sql): Captures a full history of schedule changes for each `schedule_id`.
     - materialization: table (if enabled)
diff --git a/models/intermediate/int_zendesk__schedule_holiday.sql b/models/intermediate/int_zendesk__schedule_holiday.sql
index ec265776..99ab976b 100644
--- a/models/intermediate/int_zendesk__schedule_holiday.sql
+++ b/models/intermediate/int_zendesk__schedule_holiday.sql
@@ -1,12 +1,5 @@
 {{ config(enabled=var('using_schedules', True) and var('using_holidays', True)) }}
 
-/*
-    The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may
-    change due to Daylight Savings. End result will include `valid_from` and `valid_until` columns which we will use downstream
-    to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time).
-*/
-
-
 with schedule as (
     select *
     from {{ var('schedule') }}
diff --git a/models/intermediate/int_zendesk__schedule_spine.sql b/models/intermediate/int_zendesk__schedule_spine.sql
index 3444cfb8..c16c404c 100644
--- a/models/intermediate/int_zendesk__schedule_spine.sql
+++ b/models/intermediate/int_zendesk__schedule_spine.sql
@@ -180,7 +180,7 @@ with schedule_timezones as (
         case when change_type = 'holiday'
             then ({{ dbt.datediff('holiday_starting_sunday', 'holiday_valid_until', 'minute') }}
                 + 24 * 60 -- add 1 day to set the upper bound of the holiday
-                - offset_minutes)-- timezone adjustment
+                - offset_minutes) -- timezone adjustment
             else null
         end as holiday_valid_until_minutes_from_week_start
     from holiday_weeks
@@ -214,7 +214,7 @@ with schedule_timezones as (
 
     union all
 
-    -- CFount the number of records for each schedule start_time_utc and end_time_utc for filtering later.
+    -- Count the number of records for each schedule start_time_utc and end_time_utc for filtering later.
     select distinct
         *,
         cast(count(*) over (partition by schedule_id, valid_from, valid_until, start_time_utc, end_time_utc, holiday_name)